//===- FuzzerTracePC.cpp - PC tracing--------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// Trace PCs.
// This module implements __sanitizer_cov_trace_pc_guard[_init],
// the callback required for -fsanitize-coverage=trace-pc-guard instrumentation.
//
//===----------------------------------------------------------------------===//
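
// Hedged usage sketch (an assumption about a typical build, not part of this
// file): a minimal fuzz target whose instrumented edges end up invoking the
// __sanitizer_cov_* callbacks defined below when built with
// `clang++ -g -fsanitize=fuzzer`. Kept under `#if 0` so it is never compiled
// here.
#if 0
#include <cstddef>
#include <cstdint>
extern "C" int LLVMFuzzerTestOneInput(const uint8_t *Data, size_t Size) {
  if (Size > 0 && Data[0] == 'F')  // Each branch becomes an instrumented edge.
    return 0;
  return 0;
}
#endif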

#include "FuzzerTracePC.h"
#include "FuzzerBuiltins.h"
#include "FuzzerBuiltinsMsvc.h"
#include "FuzzerCorpus.h"
#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerExtFunctions.h"
#include "FuzzerIO.h"
#include "FuzzerUtil.h"
#include "FuzzerValueBitMap.h"
#include <set>

// The coverage counters and PCs.
// These are declared as global variables named "__sancov_*" to simplify
// experiments with inlined instrumentation.
alignas(64) ATTRIBUTE_INTERFACE
uint8_t __sancov_trace_pc_guard_8bit_counters[fuzzer::TracePC::kNumPCs];

ATTRIBUTE_INTERFACE
uintptr_t __sancov_trace_pc_pcs[fuzzer::TracePC::kNumPCs];

// Used by -fsanitize-coverage=stack-depth to track stack depth
ATTRIBUTES_INTERFACE_TLS_INITIAL_EXEC uintptr_t __sancov_lowest_stack;

namespace fuzzer {

TracePC TPC;

uint8_t *TracePC::Counters() const {
  return __sancov_trace_pc_guard_8bit_counters;
}

uintptr_t *TracePC::PCs() const {
  return __sancov_trace_pc_pcs;
}

size_t TracePC::GetTotalPCCoverage() {
  if (ObservedPCs.size())
    return ObservedPCs.size();
  size_t Res = 0;
  for (size_t i = 1, N = GetNumPCs(); i < N; i++)
    if (PCs()[i])
      Res++;
  return Res;
}
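
// Calls CB(i, j, CounterIdx) for every inline 8-bit counter: i is the module
// index, j is the counter's index within that module, and CounterIdx is the
// flat index across all modules. Does nothing unless every PC-table entry has
// a matching inline counter (NumInline8bitCounters == NumPCsInPCTables).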
template<class CallBack>
void TracePC::IterateInline8bitCounters(CallBack CB) const {
  if (NumInline8bitCounters && NumInline8bitCounters == NumPCsInPCTables) {
    size_t CounterIdx = 0;
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
      uint8_t *Beg = ModuleCounters[i].Start;
      size_t Size = ModuleCounters[i].Stop - Beg;
      assert(Size == (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
      for (size_t j = 0; j < Size; j++, CounterIdx++)
        CB(i, j, CounterIdx);
    }
  }
}

// Initializes unstable counters by copying Inline8bitCounters to unstable
// counters.
void TracePC::InitializeUnstableCounters() {
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    UnstableCounters[UnstableIdx].Counter = ModuleCounters[i].Start[j];
  });
}

// Compares the current counters with counters from previous runs
// and records differences as unstable edges.
bool TracePC::UpdateUnstableCounters(int UnstableMode) {
  bool Updated = false;
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    if (ModuleCounters[i].Start[j] != UnstableCounters[UnstableIdx].Counter) {
      Updated = true;
      UnstableCounters[UnstableIdx].IsUnstable = true;
      if (UnstableMode == ZeroUnstable)
        UnstableCounters[UnstableIdx].Counter = 0;
      else if (UnstableMode == MinUnstable)
        UnstableCounters[UnstableIdx].Counter = std::min(
            ModuleCounters[i].Start[j], UnstableCounters[UnstableIdx].Counter);
    }
  });
  return Updated;
}

// Updates and applies unstable counters to ModuleCounters in a single
// iteration.
void TracePC::UpdateAndApplyUnstableCounters(int UnstableMode) {
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    if (ModuleCounters[i].Start[j] != UnstableCounters[UnstableIdx].Counter) {
      UnstableCounters[UnstableIdx].IsUnstable = true;
      if (UnstableMode == ZeroUnstable)
        ModuleCounters[i].Start[j] = 0;
      else if (UnstableMode == MinUnstable)
        ModuleCounters[i].Start[j] = std::min(
            ModuleCounters[i].Start[j], UnstableCounters[UnstableIdx].Counter);
    }
  });
}
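
// Note on the two Update* functions above: with ZeroUnstable, an edge counter
// that differs between runs is forced to zero (the edge is treated as never
// covered); with MinUnstable, the smallest value observed so far is kept, so
// unstable edges still contribute coverage but never inflate it. This summary
// is inferred from the code above.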

void TracePC::HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop) {
  if (Start == Stop) return;
  if (NumModulesWithInline8bitCounters &&
      ModuleCounters[NumModulesWithInline8bitCounters-1].Start == Start) return;
  assert(NumModulesWithInline8bitCounters <
         sizeof(ModuleCounters) / sizeof(ModuleCounters[0]));
  ModuleCounters[NumModulesWithInline8bitCounters++] = {Start, Stop};
  NumInline8bitCounters += Stop - Start;
}

void TracePC::HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop) {
  const PCTableEntry *B = reinterpret_cast<const PCTableEntry *>(Start);
  const PCTableEntry *E = reinterpret_cast<const PCTableEntry *>(Stop);
  if (NumPCTables && ModulePCTable[NumPCTables - 1].Start == B) return;
  assert(NumPCTables < sizeof(ModulePCTable) / sizeof(ModulePCTable[0]));
  ModulePCTable[NumPCTables++] = {B, E};
  NumPCsInPCTables += E - B;
}

void TracePC::HandleInit(uint32_t *Start, uint32_t *Stop) {
  if (Start == Stop || *Start) return;
  assert(NumModules < sizeof(Modules) / sizeof(Modules[0]));
  for (uint32_t *P = Start; P < Stop; P++) {
    NumGuards++;
    if (NumGuards == kNumPCs) {
      RawPrint(
          "WARNING: The binary has too many instrumented PCs.\n"
          "         You may want to reduce the size of the binary\n"
          "         for more efficient fuzzing and precise coverage data\n");
    }
    *P = NumGuards % kNumPCs;
  }
  Modules[NumModules].Start = Start;
  Modules[NumModules].Stop = Stop;
  NumModules++;
}

void TracePC::PrintModuleInfo() {
  if (NumGuards) {
    Printf("INFO: Loaded %zd modules (%zd guards): ", NumModules, NumGuards);
    for (size_t i = 0; i < NumModules; i++)
      Printf("%zd [%p, %p), ", Modules[i].Stop - Modules[i].Start,
             Modules[i].Start, Modules[i].Stop);
    Printf("\n");
  }
  if (NumModulesWithInline8bitCounters) {
    Printf("INFO: Loaded %zd modules (%zd inline 8-bit counters): ",
           NumModulesWithInline8bitCounters, NumInline8bitCounters);
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++)
      Printf("%zd [%p, %p), ", ModuleCounters[i].Stop - ModuleCounters[i].Start,
             ModuleCounters[i].Start, ModuleCounters[i].Stop);
    Printf("\n");
  }
  if (NumPCTables) {
    Printf("INFO: Loaded %zd PC tables (%zd PCs): ", NumPCTables,
           NumPCsInPCTables);
    for (size_t i = 0; i < NumPCTables; i++) {
      Printf("%zd [%p,%p), ", ModulePCTable[i].Stop - ModulePCTable[i].Start,
             ModulePCTable[i].Start, ModulePCTable[i].Stop);
    }
    Printf("\n");

    if ((NumGuards && NumGuards != NumPCsInPCTables) ||
        (NumInline8bitCounters && NumInline8bitCounters != NumPCsInPCTables)) {
      Printf("ERROR: The size of coverage PC tables does not match the\n"
             "number of instrumented PCs. This might be a compiler bug,\n"
             "please contact the libFuzzer developers.\n"
             "Also check https://bugs.llvm.org/show_bug.cgi?id=34636\n"
             "for possible workarounds (tl;dr: don't use the old GNU ld)\n");
      _Exit(1);
    }
  }
  if (size_t NumExtraCounters = ExtraCountersEnd() - ExtraCountersBegin())
    Printf("INFO: %zd Extra Counters\n", NumExtraCounters);
}

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
  const uintptr_t kBits = 12;
  const uintptr_t kMask = (1 << kBits) - 1;
  uintptr_t Idx = (Caller & kMask) | ((Callee & kMask) << kBits);
  ValueProfileMap.AddValueModPrime(Idx);
}
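
// A minimal sketch of the 24-bit index computed above, for a hypothetical
// caller/callee pair; the names and values are illustrative only, and the
// block is guarded out so it does not affect the build.
#if 0
static uintptr_t ExampleCallerCalleeIdx() {
  const uintptr_t kBits = 12;
  const uintptr_t kMask = (1 << kBits) - 1;
  uintptr_t Caller = 0x401a2b;  // hypothetical PC of the indirect call site
  uintptr_t Callee = 0x4033c4;  // hypothetical PC of the target function
  // Low 12 bits of the caller land in bits [0,12), low 12 bits of the callee
  // in bits [12,24): (0xa2b) | (0x3c4 << 12) == 0x3c4a2b.
  return (Caller & kMask) | ((Callee & kMask) << kBits);
}
#endif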

/// \return the address of the previous instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.h`
inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
#if defined(__arm__)
  // T32 (Thumb) branch instructions might be 16 or 32 bit long,
  // so we return (pc-2) in that case in order to be safe.
  // For A32 mode we return (pc-4) because all instructions are 32 bit long.
  return (PC - 3) & (~1);
#elif defined(__powerpc__) || defined(__powerpc64__) || defined(__aarch64__)
  // PCs are always 4 byte aligned.
  return PC - 4;
#elif defined(__sparc__) || defined(__mips__)
  return PC - 8;
#else
  return PC - 1;
#endif
}

/// \return the address of the next instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.cc`
inline ALWAYS_INLINE uintptr_t GetNextInstructionPc(uintptr_t PC) {
#if defined(__mips__)
  return PC + 8;
#elif defined(__powerpc__) || defined(__sparc__) || defined(__arm__) || \
    defined(__aarch64__)
  return PC + 4;
#else
  return PC + 1;
#endif
}

void TracePC::UpdateObservedPCs() {
  Vector<uintptr_t> CoveredFuncs;
  auto ObservePC = [&](uintptr_t PC) {
    if (ObservedPCs.insert(PC).second && DoPrintNewPCs) {
      PrintPC("\tNEW_PC: %p %F %L", "\tNEW_PC: %p", GetNextInstructionPc(PC));
      Printf("\n");
    }
  };

  auto Observe = [&](const PCTableEntry &TE) {
    if (TE.PCFlags & 1)
      if (++ObservedFuncs[TE.PC] == 1 && NumPrintNewFuncs)
        CoveredFuncs.push_back(TE.PC);
    ObservePC(TE.PC);
  };

  if (NumPCsInPCTables) {
    if (NumInline8bitCounters == NumPCsInPCTables) {
      IterateInline8bitCounters([&](int i, int j, int CounterIdx) {
        if (ModuleCounters[i].Start[j])
          Observe(ModulePCTable[i].Start[j]);
      });
    } else if (NumGuards == NumPCsInPCTables) {
      size_t GuardIdx = 1;
      for (size_t i = 0; i < NumModules; i++) {
        uint32_t *Beg = Modules[i].Start;
        size_t Size = Modules[i].Stop - Beg;
        assert(Size ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t j = 0; j < Size; j++, GuardIdx++)
          if (Counters()[GuardIdx])
            Observe(ModulePCTable[i].Start[j]);
      }
    }
  }

  for (size_t i = 0, N = Min(CoveredFuncs.size(), NumPrintNewFuncs); i < N;
       i++) {
    Printf("\tNEW_FUNC[%zd/%zd]: ", i + 1, CoveredFuncs.size());
    PrintPC("%p %F %L", "%p", GetNextInstructionPc(CoveredFuncs[i]));
    Printf("\n");
  }
}


static std::string GetModuleName(uintptr_t PC) {
  char ModulePathRaw[4096] = "";  // What's PATH_MAX in portable C++?
  void *OffsetRaw = nullptr;
  if (!EF->__sanitizer_get_module_and_offset_for_pc(
          reinterpret_cast<void *>(PC), ModulePathRaw,
          sizeof(ModulePathRaw), &OffsetRaw))
    return "";
  return ModulePathRaw;
}

template<class CallBack>
void TracePC::IterateCoveredFunctions(CallBack CB) {
  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    assert(M.Start < M.Stop);
    auto ModuleName = GetModuleName(M.Start->PC);
    for (auto NextFE = M.Start; NextFE < M.Stop; ) {
      auto FE = NextFE;
      assert((FE->PCFlags & 1) && "Not a function entry point");
      do {
        NextFE++;
      } while (NextFE < M.Stop && !(NextFE->PCFlags & 1));
      if (ObservedFuncs.count(FE->PC))
        CB(FE, NextFE, ObservedFuncs[FE->PC]);
    }
  }
}
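
// Used by the -focus_function=<name> option (assumption: that is the libFuzzer
// flag which supplies FuncName here): remembers the module/counter index of
// the named function so ObservedFocusFunction() can report whether it was
// reached.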
void TracePC::SetFocusFunction(const std::string &FuncName) {
  // This function should be called once.
  assert(FocusFunction.first > NumModulesWithInline8bitCounters);
  if (FuncName.empty())
    return;
  for (size_t M = 0; M < NumModulesWithInline8bitCounters; M++) {
    auto &PCTE = ModulePCTable[M];
    size_t N = PCTE.Stop - PCTE.Start;
    for (size_t I = 0; I < N; I++) {
      if (!(PCTE.Start[I].PCFlags & 1)) continue;  // not a function entry.
      auto Name = DescribePC("%F", GetNextInstructionPc(PCTE.Start[I].PC));
      if (Name[0] == 'i' && Name[1] == 'n' && Name[2] == ' ')
        Name = Name.substr(3, std::string::npos);
      if (FuncName != Name) continue;
      Printf("INFO: Focus function is set to '%s'\n", Name.c_str());
      FocusFunction = {M, I};
      return;
    }
  }
}

bool TracePC::ObservedFocusFunction() {
  size_t I = FocusFunction.first;
  size_t J = FocusFunction.second;
  if (I >= NumModulesWithInline8bitCounters)
    return false;
  auto &MC = ModuleCounters[I];
  size_t Size = MC.Stop - MC.Start;
  if (J >= Size)
    return false;
  return MC.Start[J] != 0;
}

void TracePC::PrintCoverage() {
  if (!EF->__sanitizer_symbolize_pc ||
      !EF->__sanitizer_get_module_and_offset_for_pc) {
    Printf("INFO: __sanitizer_symbolize_pc or "
           "__sanitizer_get_module_and_offset_for_pc is not available,"
           " not printing coverage\n");
    return;
  }
  Printf("COVERAGE:\n");
  auto CoveredFunctionCallback = [&](const PCTableEntry *First,
                                     const PCTableEntry *Last,
                                     uintptr_t Counter) {
    assert(First < Last);
    auto VisualizePC = GetNextInstructionPc(First->PC);
    std::string FileStr = DescribePC("%s", VisualizePC);
    if (!IsInterestingCoverageFile(FileStr))
      return;
    std::string FunctionStr = DescribePC("%F", VisualizePC);
    if (FunctionStr.find("in ") == 0)
      FunctionStr = FunctionStr.substr(3);
    std::string LineStr = DescribePC("%l", VisualizePC);
    size_t Line = std::stoul(LineStr);
    size_t NumEdges = Last - First;
    Vector<uintptr_t> UncoveredPCs;
    for (auto TE = First; TE < Last; TE++)
      if (!ObservedPCs.count(TE->PC))
        UncoveredPCs.push_back(TE->PC);
    Printf("COVERED_FUNC: hits: %zd", Counter);
    Printf(" edges: %zd/%zd", NumEdges - UncoveredPCs.size(), NumEdges);
    Printf(" %s %s:%zd\n", FunctionStr.c_str(), FileStr.c_str(), Line);
    for (auto PC: UncoveredPCs)
      Printf("  UNCOVERED_PC: %s\n",
             DescribePC("%s:%l", GetNextInstructionPc(PC)).c_str());
  };

  IterateCoveredFunctions(CoveredFunctionCallback);
}
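
// Example of the output produced above (the format follows the Printf calls
// directly; the function and file names are hypothetical):
//   COVERED_FUNC: hits: 42 edges: 7/9 MyParser(const char*) my_parser.cpp:123
//     UNCOVERED_PC: my_parser.cpp:140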

void TracePC::DumpCoverage() {
  if (EF->__sanitizer_dump_coverage) {
    Vector<uintptr_t> PCsCopy(GetNumPCs());
    for (size_t i = 0; i < GetNumPCs(); i++)
      PCsCopy[i] = PCs()[i] ? GetPreviousInstructionPc(PCs()[i]) : 0;
    EF->__sanitizer_dump_coverage(PCsCopy.data(), PCsCopy.size());
  }
}

void TracePC::PrintUnstableStats() {
  size_t count = 0;
  Printf("UNSTABLE_FUNCTIONS:\n");
  IterateInline8bitCounters([&](int i, int j, int UnstableIdx) {
    const PCTableEntry &TE = ModulePCTable[i].Start[j];
    if (UnstableCounters[UnstableIdx].IsUnstable) {
      count++;
      if (ObservedFuncs.count(TE.PC)) {
        auto VisualizePC = GetNextInstructionPc(TE.PC);
        std::string FunctionStr = DescribePC("%F", VisualizePC);
        if (FunctionStr.find("in ") == 0)
          FunctionStr = FunctionStr.substr(3);
        Printf("%s\n", FunctionStr.c_str());
      }
    }
  });

  Printf("stat::stability_rate: %.2f\n",
         100 - static_cast<float>(count * 100) / NumInline8bitCounters);
}

// Value profile.
// We keep track of various values that affect control flow.
// These values are inserted into a bit-set-based hash map.
// Every new bit in the map is treated as new coverage.
//
// For memcmp/strcmp/etc., the interesting value is the length of the common
// prefix of the parameters.
// For cmp instructions the interesting value is the XOR of the parameters.
// The interesting value is mixed up with the PC and is then added to the map.

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                                size_t n, bool StopAtZero) {
  if (!n) return;
  size_t Len = std::min(n, Word::GetMaxSize());
  const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
  const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
  uint8_t B1[Word::kMaxSize];
  uint8_t B2[Word::kMaxSize];
  // Copy the data into locals in this non-msan-instrumented function
  // to avoid msan complaining further.
  size_t Hash = 0;  // Compute some simple hash of both strings.
  for (size_t i = 0; i < Len; i++) {
    B1[i] = A1[i];
    B2[i] = A2[i];
    size_t T = B1[i];
    Hash ^= (T << 8) | B2[i];
  }
  size_t I = 0;
  for (; I < Len; I++)
    if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
      break;
  size_t PC = reinterpret_cast<size_t>(caller_pc);
  size_t Idx = (PC & 4095) | (I << 12);
  ValueProfileMap.AddValue(Idx);
  TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
}
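
// Note on the Idx packing above: the low 12 bits hold the caller PC
// (PC & 4095) and the bits from 12 upward hold I, the length of the common
// prefix, so a longer match at the same call site produces a new feature.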

template <class T>
ATTRIBUTE_TARGET_POPCNT ALWAYS_INLINE
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCmp(uintptr_t PC, T Arg1, T Arg2) {
  uint64_t ArgXor = Arg1 ^ Arg2;
  if (sizeof(T) == 4)
    TORC4.Insert(ArgXor, Arg1, Arg2);
  else if (sizeof(T) == 8)
    TORC8.Insert(ArgXor, Arg1, Arg2);
  uint64_t HammingDistance = Popcountll(ArgXor);  // [0,64]
  uint64_t AbsoluteDistance = (Arg1 == Arg2 ? 0 : Clzll(Arg1 - Arg2) + 1);
  ValueProfileMap.AddValue(PC * 128 + HammingDistance);
  ValueProfileMap.AddValue(PC * 128 + 64 + AbsoluteDistance);
}
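
// A hedged sketch (illustrative only, never compiled) of the two value-profile
// features HandleCmp derives for one 64-bit comparison. Popcountll and Clzll
// come from the FuzzerBuiltins*.h headers included above.
#if 0
static void ExampleCmpFeatures(uintptr_t PC, uint64_t Arg1, uint64_t Arg2) {
  uint64_t ArgXor = Arg1 ^ Arg2;
  uint64_t HammingDistance = Popcountll(ArgXor);  // differing bits, [0,64]
  uint64_t AbsoluteDistance = (Arg1 == Arg2 ? 0 : Clzll(Arg1 - Arg2) + 1);
  // HandleCmp records PC * 128 + HammingDistance and
  // PC * 128 + 64 + AbsoluteDistance; each PC thus owns a block of 128
  // feature slots split between the two distance measures.
  (void)PC; (void)HammingDistance; (void)AbsoluteDistance;
}
#endif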

static size_t InternalStrnlen(const char *S, size_t MaxLen) {
  size_t Len = 0;
  for (; Len < MaxLen && S[Len]; Len++) {}
  return Len;
}

// Finds min of (strlen(S1), strlen(S2)).
// Needed because one of these strings may actually be non-zero-terminated.
static size_t InternalStrnlen2(const char *S1, const char *S2) {
  size_t Len = 0;
  for (; S1[Len] && S2[Len]; Len++) {}
  return Len;
}

void TracePC::ClearInlineCounters() {
  for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
    uint8_t *Beg = ModuleCounters[i].Start;
    size_t Size = ModuleCounters[i].Stop - Beg;
    memset(Beg, 0, Size);
  }
}

ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::RecordInitialStack() {
  int stack;
  __sancov_lowest_stack = InitialStack = reinterpret_cast<uintptr_t>(&stack);
}

uintptr_t TracePC::GetMaxStackOffset() const {
  return InitialStack - __sancov_lowest_stack;  // Stack grows down.
}

}  // namespace fuzzer

extern "C" {
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  uint32_t Idx = *Guard;
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

// Best-effort support for -fsanitize-coverage=trace-pc, which is available
// in both Clang and GCC.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc() {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  uintptr_t Idx = PC & (((uintptr_t)1 << fuzzer::TracePC::kTracePcBits) - 1);
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_trace_pc_guard_init(uint32_t *Start, uint32_t *Stop) {
  fuzzer::TPC.HandleInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_8bit_counters_init(uint8_t *Start, uint8_t *Stop) {
  fuzzer::TPC.HandleInline8bitCountersInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_pcs_init(const uintptr_t *pcs_beg,
                              const uintptr_t *pcs_end) {
  fuzzer::TPC.HandlePCsInit(pcs_beg, pcs_end);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCallerCallee(PC, Callee);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
// For now, the __sanitizer_cov_trace_const_cmp[1248] callbacks just mimic
// the behaviour of the __sanitizer_cov_trace_cmp[1248] ones. This, however,
// should be changed later to make full use of the instrumentation.
void __sanitizer_cov_trace_const_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
  uint64_t N = Cases[0];
  uint64_t ValSizeInBits = Cases[1];
  uint64_t *Vals = Cases + 2;
  // Skip the most common and the most boring case: all switch values are small.
  if (Vals[N - 1] < 256 && Val < 256)
    return;
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  size_t i;
  uint64_t Token = 0;
  for (i = 0; i < N; i++) {
    Token = Val ^ Vals[i];
    if (Val < Vals[i])
      break;
  }

  if (ValSizeInBits == 16)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint16_t>(Token), (uint16_t)(0));
  else if (ValSizeInBits == 32)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint32_t>(Token), (uint32_t)(0));
  else
    fuzzer::TPC.HandleCmp(PC + i, Token, (uint64_t)(0));
}
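
// Hedged illustration of the Cases layout consumed above (an assumption about
// what the instrumentation passes, inferred from the reads in this function):
// Cases[0] = number of case values, Cases[1] = value width in bits,
// Cases[2..] = the case values themselves, in ascending order.
#if 0
// e.g. for `switch (x)` on a uint32_t with cases 1, 10 and 1000:
static const uint64_t ExampleCases[] = {3 /*N*/, 32 /*bits*/, 1, 10, 1000};
#endif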

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div4(uint32_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Val, (uint32_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div8(uint64_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Val, (uint64_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_gep(uintptr_t Idx) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Idx, (uintptr_t)0);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memcmp(void *caller_pc, const void *s1,
                                  const void *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/false);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncmp(void *caller_pc, const char *s1,
                                   const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t Len1 = fuzzer::InternalStrnlen(s1, n);
  size_t Len2 = fuzzer::InternalStrnlen(s2, n);
  n = std::min(n, Len1);
  n = std::min(n, Len2);
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcmp(void *caller_pc, const char *s1,
                                  const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t N = fuzzer::InternalStrnlen2(s1, s2);
  if (N <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, N, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncasecmp(void *called_pc, const char *s1,
                                       const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strncmp(called_pc, s1, s2, n, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasecmp(void *called_pc, const char *s1,
                                      const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strcmp(called_pc, s1, s2, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strstr(void *called_pc, const char *s1,
                                  const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasestr(void *called_pc, const char *s1,
                                      const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memmem(void *called_pc, const void *s1, size_t len1,
                                  const void *s2, size_t len2, void *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), len2);
}
}  // extern "C"