blob: 240d76fb61e2a80850a037b81f7b2f32649948a6 [file] [log] [blame]
george.karpenkov29efa6d2017-08-21 23:25:50 +00001//===- FuzzerTracePC.cpp - PC tracing--------------------------------------===//
2//
chandlerc40284492019-01-19 08:50:56 +00003// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
george.karpenkov29efa6d2017-08-21 23:25:50 +00006//
7//===----------------------------------------------------------------------===//
8// Trace PCs.
9// This module implements __sanitizer_cov_trace_pc_guard[_init],
10// the callback required for -fsanitize-coverage=trace-pc-guard instrumentation.
11//
12//===----------------------------------------------------------------------===//
13
14#include "FuzzerTracePC.h"
metzman40132972019-01-09 21:46:09 +000015#include "FuzzerBuiltins.h"
16#include "FuzzerBuiltinsMsvc.h"
george.karpenkov29efa6d2017-08-21 23:25:50 +000017#include "FuzzerCorpus.h"
18#include "FuzzerDefs.h"
19#include "FuzzerDictionary.h"
20#include "FuzzerExtFunctions.h"
21#include "FuzzerIO.h"
22#include "FuzzerUtil.h"
23#include "FuzzerValueBitMap.h"
24#include <set>
25
// Used by -fsanitize-coverage=stack-depth to track stack depth
ATTRIBUTES_INTERFACE_TLS_INITIAL_EXEC uintptr_t __sancov_lowest_stack;

namespace fuzzer {

// The single global TracePC instance; all coverage callbacks below feed it.
TracePC TPC;
32
// Number of distinct PCs observed so far in this run.
size_t TracePC::GetTotalPCCoverage() {
  return ObservedPCs.size();
}
36
37
38void TracePC::HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop) {
39 if (Start == Stop) return;
kcce2469202019-01-30 06:15:52 +000040 if (NumModules &&
41 Modules[NumModules - 1].Start() == Start)
42 return;
43 assert(NumModules <
44 sizeof(Modules) / sizeof(Modules[0]));
45 auto &M = Modules[NumModules++];
46 uint8_t *AlignedStart = RoundUpByPage(Start);
47 uint8_t *AlignedStop = RoundDownByPage(Stop);
48 size_t NumFullPages = AlignedStop > AlignedStart ?
49 (AlignedStop - AlignedStart) / PageSize() : 0;
50 bool NeedFirst = Start < AlignedStart || !NumFullPages;
51 bool NeedLast = Stop > AlignedStop && AlignedStop >= AlignedStart;
52 M.NumRegions = NumFullPages + NeedFirst + NeedLast;;
53 assert(M.NumRegions > 0);
54 M.Regions = new Module::Region[M.NumRegions];
55 assert(M.Regions);
56 size_t R = 0;
57 if (NeedFirst)
58 M.Regions[R++] = {Start, std::min(Stop, AlignedStart), true, false};
59 for (uint8_t *P = AlignedStart; P < AlignedStop; P += PageSize())
60 M.Regions[R++] = {P, P + PageSize(), true, true};
61 if (NeedLast)
62 M.Regions[R++] = {AlignedStop, Stop, true, false};
63 assert(R == M.NumRegions);
64 assert(M.Size() == (size_t)(Stop - Start));
65 assert(M.Stop() == Stop);
66 assert(M.Start() == Start);
67 NumInline8bitCounters += M.Size();
george.karpenkov29efa6d2017-08-21 23:25:50 +000068}
69
kccda168932019-01-31 00:09:43 +000070// Mark all full page counter regions as PROT_NONE and set Enabled=false.
71// The first time the instrumented code hits such a protected/disabled
72// counter region we should catch a SEGV and call UnprotectLazyCounters,
73// which will mark the page as PROT_READ|PROT_WRITE and set Enabled=true.
74//
75// Whenever other functions iterate over the counters they should ignore
76// regions with Enabled=false.
77void TracePC::ProtectLazyCounters() {
78 size_t NumPagesProtected = 0;
79 IterateCounterRegions([&](Module::Region &R) {
80 if (!R.OneFullPage) return;
81 if (Mprotect(R.Start, R.Stop - R.Start, false)) {
82 R.Enabled = false;
83 NumPagesProtected++;
84 }
85 });
86 if (NumPagesProtected)
87 Printf("INFO: %zd pages of counters where protected;"
88 " libFuzzer's SEGV handler must be installed\n",
89 NumPagesProtected);
90}
91
// SEGV-handler helper: if CounterPtr (the faulting address) lies inside a
// protected lazy-counter page, make that page writable again and mark the
// region Enabled. Returns true iff a region was unprotected, i.e. the
// fault belonged to us and execution may resume.
bool TracePC::UnprotectLazyCounters(void *CounterPtr) {
  // Printf("UnprotectLazyCounters: %p\n", CounterPtr);
  if (!CounterPtr)
    return false;
  bool Done = false;
  uint8_t *Addr = reinterpret_cast<uint8_t *>(CounterPtr);
  IterateCounterRegions([&](Module::Region &R) {
    // Only full-page regions are ever protected; stop scanning once the
    // faulting region has been handled.
    if (!R.OneFullPage || R.Enabled || Done) return;
    if (Addr >= R.Start && Addr < R.Stop)
      if (Mprotect(R.Start, R.Stop - R.Start, true)) {
        R.Enabled = true;
        Done = true;
      }
  });
  return Done;
}
108
// Registers a module's PC table (emitted by -fsanitize-coverage=pc-table).
// [Start, Stop) is reinterpreted as an array of PCTableEntry; repeated
// registration of the same module is ignored.
void TracePC::HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop) {
  const PCTableEntry *B = reinterpret_cast<const PCTableEntry *>(Start);
  const PCTableEntry *E = reinterpret_cast<const PCTableEntry *>(Stop);
  if (NumPCTables && ModulePCTable[NumPCTables - 1].Start == B) return;
  assert(NumPCTables < sizeof(ModulePCTable) / sizeof(ModulePCTable[0]));
  ModulePCTable[NumPCTables++] = {B, E};
  NumPCsInPCTables += E - B;
}
117
// Prints a summary of the loaded instrumentation: modules with inline
// 8-bit counters, PC tables, and extra (user-supplied) counters.
// Exits hard if counter and PC-table sizes disagree, since the rest of
// the coverage logic assumes a 1:1 mapping between them.
void TracePC::PrintModuleInfo() {
  if (NumModules) {
    Printf("INFO: Loaded %zd modules (%zd inline 8-bit counters): ",
           NumModules, NumInline8bitCounters);
    for (size_t i = 0; i < NumModules; i++)
      Printf("%zd [%p, %p), ", Modules[i].Size(), Modules[i].Start(),
             Modules[i].Stop());
    Printf("\n");
  }
  if (NumPCTables) {
    Printf("INFO: Loaded %zd PC tables (%zd PCs): ", NumPCTables,
           NumPCsInPCTables);
    for (size_t i = 0; i < NumPCTables; i++) {
      Printf("%zd [%p,%p), ", ModulePCTable[i].Stop - ModulePCTable[i].Start,
             ModulePCTable[i].Start, ModulePCTable[i].Stop);
    }
    Printf("\n");

    if (NumInline8bitCounters && NumInline8bitCounters != NumPCsInPCTables) {
      Printf("ERROR: The size of coverage PC tables does not match the\n"
             "number of instrumented PCs. This might be a compiler bug,\n"
             "please contact the libFuzzer developers.\n"
             "Also check https://bugs.llvm.org/show_bug.cgi?id=34636\n"
             "for possible workarounds (tl;dr: don't use the old GNU ld)\n");
      _Exit(1);
    }
  }
  if (size_t NumExtraCounters = ExtraCountersEnd() - ExtraCountersBegin())
    Printf("INFO: %zd Extra Counters\n", NumExtraCounters);
}
148
149ATTRIBUTE_NO_SANITIZE_ALL
150void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
151 const uintptr_t kBits = 12;
152 const uintptr_t kMask = (1 << kBits) - 1;
153 uintptr_t Idx = (Caller & kMask) | ((Callee & kMask) << kBits);
154 ValueProfileMap.AddValueModPrime(Idx);
155}
156
/// \return the address of the previous instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.h`
inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
#if defined(__arm__)
  // T32 (Thumb) branch instructions might be 16 or 32 bit long,
  // so we return (pc-2) in that case in order to be safe.
  // For A32 mode we return (pc-4) because all instructions are 32 bit long.
  // (PC - 3) & ~1 yields pc-4 for even (A32) PCs and pc-2 after clearing
  // the Thumb bit for odd PCs.
  return (PC - 3) & (~1);
#elif defined(__powerpc__) || defined(__powerpc64__) || defined(__aarch64__)
  // PCs are always 4 byte aligned.
  return PC - 4;
#elif defined(__sparc__) || defined(__mips__)
  // Matches the sanitizer runtime's constant for these targets.
  return PC - 8;
#else
  // Variable-length ISAs (e.g. x86): pc-1 lands inside the previous
  // instruction, which is sufficient for attribution/symbolization.
  return PC - 1;
#endif
}
174
/// \return the address of the next instruction.
/// Note: the logic is copied from `sanitizer_common/sanitizer_stacktrace.cc`
inline ALWAYS_INLINE uintptr_t GetNextInstructionPc(uintptr_t PC) {
#if defined(__mips__)
  // Matches the sanitizer runtime's constant for MIPS.
  return PC + 8;
#elif defined(__powerpc__) || defined(__sparc__) || defined(__arm__) || \
    defined(__aarch64__)
  // Fixed 4-byte instruction targets.
  return PC + 4;
#else
  // Variable-length ISAs (e.g. x86): pc+1 is enough for symbolization.
  return PC + 1;
#endif
}
187
// Scans the inline 8-bit counters, records every PC whose counter is
// non-zero into ObservedPCs/ObservedFuncs, and optionally prints newly
// seen PCs and functions.
void TracePC::UpdateObservedPCs() {
  Vector<uintptr_t> CoveredFuncs;
  // Record one PC; print it if it is new and -print_pcs is on.
  auto ObservePC = [&](uintptr_t PC) {
    if (ObservedPCs.insert(PC).second && DoPrintNewPCs) {
      PrintPC("\tNEW_PC: %p %F %L", "\tNEW_PC: %p", GetNextInstructionPc(PC));
      Printf("\n");
    }
  };

  // Record one PC-table entry; PCFlags bit 0 marks a function entry, whose
  // first observation queues it for NEW_FUNC printing below.
  auto Observe = [&](const PCTableEntry &TE) {
    if (TE.PCFlags & 1)
      if (++ObservedFuncs[TE.PC] == 1 && NumPrintNewFuncs)
        CoveredFuncs.push_back(TE.PC);
    ObservePC(TE.PC);
  };

  if (NumPCsInPCTables) {
    // This mapping (counter index -> PC-table index via M.Idx) is only
    // valid when counters and PC tables are the same size.
    if (NumInline8bitCounters == NumPCsInPCTables) {
      for (size_t i = 0; i < NumModules; i++) {
        auto &M = Modules[i];
        assert(M.Size() ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t r = 0; r < M.NumRegions; r++) {
          auto &R = M.Regions[r];
          // Skip regions that are still PROT_NONE (lazy counters).
          if (!R.Enabled) continue;
          for (uint8_t *P = R.Start; P < R.Stop; P++)
            if (*P)
              Observe(ModulePCTable[i].Start[M.Idx(P)]);
        }
      }
    }
  }

  // Print up to NumPrintNewFuncs newly covered functions.
  for (size_t i = 0, N = Min(CoveredFuncs.size(), NumPrintNewFuncs); i < N;
       i++) {
    Printf("\tNEW_FUNC[%zd/%zd]: ", i + 1, CoveredFuncs.size());
    PrintPC("%p %F %L", "%p", GetNextInstructionPc(CoveredFuncs[i]));
    Printf("\n");
  }
}
228
george.karpenkov29efa6d2017-08-21 23:25:50 +0000229
// Returns the path of the module containing PC, or "" if the sanitizer
// runtime cannot resolve it.
static std::string GetModuleName(uintptr_t PC) {
  char ModulePathRaw[4096] = "";  // What's PATH_MAX in portable C++?
  void *OffsetRaw = nullptr;
  if (!EF->__sanitizer_get_module_and_offset_for_pc(
          reinterpret_cast<void *>(PC), ModulePathRaw,
          sizeof(ModulePathRaw), &OffsetRaw))
    return "";
  return ModulePathRaw;
}
239
// Walks every PC table and invokes CB(FirstEntry, OnePastLastEntry, Hits)
// once per function, where a "function" is a maximal run of entries
// starting at one with PCFlags bit 0 set (a function entry point) and
// Hits is that function's observed counter.
template<class CallBack>
void TracePC::IterateCoveredFunctions(CallBack CB) {
  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    assert(M.Start < M.Stop);
    // NOTE(review): ModuleName is computed but currently unused here.
    auto ModuleName = GetModuleName(M.Start->PC);
    for (auto NextFE = M.Start; NextFE < M.Stop; ) {
      auto FE = NextFE;
      assert((FE->PCFlags & 1) && "Not a function entry point");
      // Advance to the next function entry (or the end of the table).
      do {
        NextFE++;
      } while (NextFE < M.Stop && !(NextFE->PCFlags & 1));
      CB(FE, NextFE, ObservedFuncs[FE->PC]);
    }
  }
}
256
kcc3acbe072018-05-16 23:26:37 +0000257void TracePC::SetFocusFunction(const std::string &FuncName) {
258 // This function should be called once.
kcce2469202019-01-30 06:15:52 +0000259 assert(!FocusFunctionCounterPtr);
kcc3acbe072018-05-16 23:26:37 +0000260 if (FuncName.empty())
261 return;
kcce2469202019-01-30 06:15:52 +0000262 for (size_t M = 0; M < NumModules; M++) {
kcc3acbe072018-05-16 23:26:37 +0000263 auto &PCTE = ModulePCTable[M];
264 size_t N = PCTE.Stop - PCTE.Start;
265 for (size_t I = 0; I < N; I++) {
266 if (!(PCTE.Start[I].PCFlags & 1)) continue; // not a function entry.
267 auto Name = DescribePC("%F", GetNextInstructionPc(PCTE.Start[I].PC));
268 if (Name[0] == 'i' && Name[1] == 'n' && Name[2] == ' ')
269 Name = Name.substr(3, std::string::npos);
270 if (FuncName != Name) continue;
271 Printf("INFO: Focus function is set to '%s'\n", Name.c_str());
kcce2469202019-01-30 06:15:52 +0000272 FocusFunctionCounterPtr = Modules[M].Start() + I;
kcc3acbe072018-05-16 23:26:37 +0000273 return;
274 }
275 }
276}
277
278bool TracePC::ObservedFocusFunction() {
kcce2469202019-01-30 06:15:52 +0000279 return FocusFunctionCounterPtr && *FocusFunctionCounterPtr;
kcc3acbe072018-05-16 23:26:37 +0000280}
281
// Prints a per-function coverage report (COVERED_FUNC / UNCOVERED_PC
// lines). Requires the sanitizer symbolization entry points; bails out
// with an informational message if they are unavailable.
void TracePC::PrintCoverage() {
  if (!EF->__sanitizer_symbolize_pc ||
      !EF->__sanitizer_get_module_and_offset_for_pc) {
    Printf("INFO: __sanitizer_symbolize_pc or "
           "__sanitizer_get_module_and_offset_for_pc is not available,"
           " not printing coverage\n");
    return;
  }
  Printf("COVERAGE:\n");
  // Called once per function (see IterateCoveredFunctions) with its
  // PC-table entry range and observed hit count.
  auto CoveredFunctionCallback = [&](const PCTableEntry *First,
                                     const PCTableEntry *Last,
                                     uintptr_t Counter) {
    assert(First < Last);
    auto VisualizePC = GetNextInstructionPc(First->PC);
    std::string FileStr = DescribePC("%s", VisualizePC);
    // Skip files the user did not ask about (-focus on interesting files).
    if (!IsInterestingCoverageFile(FileStr))
      return;
    std::string FunctionStr = DescribePC("%F", VisualizePC);
    // Strip the symbolizer's leading "in " prefix.
    if (FunctionStr.find("in ") == 0)
      FunctionStr = FunctionStr.substr(3);
    std::string LineStr = DescribePC("%l", VisualizePC);
    size_t NumEdges = Last - First;
    Vector<uintptr_t> UncoveredPCs;
    for (auto TE = First; TE < Last; TE++)
      if (!ObservedPCs.count(TE->PC))
        UncoveredPCs.push_back(TE->PC);
    Printf("%sCOVERED_FUNC: hits: %zd", Counter ? "" : "UN", Counter);
    Printf(" edges: %zd/%zd", NumEdges - UncoveredPCs.size(), NumEdges);
    Printf(" %s %s:%s\n", FunctionStr.c_str(), FileStr.c_str(),
           LineStr.c_str());
    // For covered functions, list the individual edges never hit.
    if (Counter)
      for (auto PC : UncoveredPCs)
        Printf("  UNCOVERED_PC: %s\n",
               DescribePC("%s:%l", GetNextInstructionPc(PC)).c_str());
  };

  IterateCoveredFunctions(CoveredFunctionCallback);
}
320
george.karpenkov29efa6d2017-08-21 23:25:50 +0000321// Value profile.
322// We keep track of various values that affect control flow.
323// These values are inserted into a bit-set-based hash map.
324// Every new bit in the map is treated as a new coverage.
325//
326// For memcmp/strcmp/etc the interesting value is the length of the common
327// prefix of the parameters.
328// For cmp instructions the interesting value is a XOR of the parameters.
329// The interesting value is mixed up with the PC and is then added to the map.
330
// Records value-profile signal for a memcmp/strcmp-like comparison of
// s1[0..n) and s2[0..n): the interesting value is the length of the
// common prefix (stopping at NUL when StopAtZero). Also stores both
// words in the table of recent compares (TORCW) for use by mutators.
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                                size_t n, bool StopAtZero) {
  if (!n) return;
  size_t Len = std::min(n, Word::GetMaxSize());
  const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
  const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
  uint8_t B1[Word::kMaxSize];
  uint8_t B2[Word::kMaxSize];
  // Copy the data into locals in this non-msan-instrumented function
  // to avoid msan complaining further.
  size_t Hash = 0;  // Compute some simple hash of both strings.
  for (size_t i = 0; i < Len; i++) {
    B1[i] = A1[i];
    B2[i] = A2[i];
    size_t T = B1[i];
    Hash ^= (T << 8) | B2[i];
  }
  // I = length of the common prefix (optionally NUL-terminated).
  size_t I = 0;
  for (; I < Len; I++)
    if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
      break;
  size_t PC = reinterpret_cast<size_t>(caller_pc);
  // Mix the low 12 bits of the caller PC with the prefix length.
  size_t Idx = (PC & 4095) | (I << 12);
  ValueProfileMap.AddValue(Idx);
  TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
}
358
// Records value-profile signal for a comparison of Arg1 and Arg2 at PC:
// both the Hamming distance (popcount of the XOR) and a coarse absolute
// distance are fed to the value profile, and 4/8-byte operand pairs are
// stored in the tables of recent compares for the mutator.
template <class T>
ATTRIBUTE_TARGET_POPCNT ALWAYS_INLINE
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCmp(uintptr_t PC, T Arg1, T Arg2) {
  uint64_t ArgXor = Arg1 ^ Arg2;
  if (sizeof(T) == 4)
    TORC4.Insert(ArgXor, Arg1, Arg2);
  else if (sizeof(T) == 8)
    TORC8.Insert(ArgXor, Arg1, Arg2);
  uint64_t HammingDistance = Popcountll(ArgXor);  // [0,64]
  // Leading-zero count of the difference; 0 means equal.
  uint64_t AbsoluteDistance = (Arg1 == Arg2 ? 0 : Clzll(Arg1 - Arg2) + 1);
  // Each PC owns 128 slots: [0,64) for Hamming, [64,128) for distance.
  ValueProfileMap.AddValue(PC * 128 + HammingDistance);
  ValueProfileMap.AddValue(PC * 128 + 64 + AbsoluteDistance);
}
373
// Returns the number of bytes in S before the first NUL, scanning at
// most MaxLen bytes.
static size_t InternalStrnlen(const char *S, size_t MaxLen) {
  size_t Pos = 0;
  while (Pos < MaxLen && S[Pos] != 0)
    ++Pos;
  return Pos;
}
379
// Finds min of (strlen(S1), strlen(S2)).
// Needed because one of these strings may actually be non-zero terminated.
static size_t InternalStrnlen2(const char *S1, const char *S2) {
  size_t Pos = 0;
  while (S1[Pos] != 0 && S2[Pos] != 0)
    ++Pos;
  return Pos;
}
387
388void TracePC::ClearInlineCounters() {
kcce2469202019-01-30 06:15:52 +0000389 IterateCounterRegions([](const Module::Region &R){
390 if (R.Enabled)
391 memset(R.Start, 0, R.Stop - R.Start);
392 });
george.karpenkov29efa6d2017-08-21 23:25:50 +0000393}
394
// Captures the current stack pointer (via the address of a local) as the
// baseline for -fsanitize-coverage=stack-depth tracking.
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::RecordInitialStack() {
  int stack;
  __sancov_lowest_stack = InitialStack = reinterpret_cast<uintptr_t>(&stack);
}
400
// Deepest stack usage observed since RecordInitialStack, in bytes.
uintptr_t TracePC::GetMaxStackOffset() const {
  return InitialStack - __sancov_lowest_stack;  // Stack grows down
}
404
// Prints a fatal "this instrumentation is no longer supported" message
// for the given flag and terminates the process.
void WarnAboutDeprecatedInstrumentation(const char *flag) {
  // Use RawPrint because Printf cannot be used on Windows before OutputFile is
  // initialized.
  RawPrint(flag);
  RawPrint(
      " is no longer supported by libFuzzer.\n"
      "Please either migrate to a compiler that supports -fsanitize=fuzzer\n"
      "or use an older version of libFuzzer\n");
  exit(1);
}
415
george.karpenkov29efa6d2017-08-21 23:25:50 +0000416} // namespace fuzzer
417
418extern "C" {
// -fsanitize-coverage=trace-pc-guard is no longer supported: this callback
// reports the deprecation and exits (see WarnAboutDeprecatedInstrumentation).
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
  fuzzer::WarnAboutDeprecatedInstrumentation(
      "-fsanitize-coverage=trace-pc-guard");
}

// -fsanitize-coverage=trace-pc (available in both Clang and GCC) is likewise
// no longer supported; the callback only reports the deprecation and exits.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc() {
  fuzzer::WarnAboutDeprecatedInstrumentation("-fsanitize-coverage=trace-pc");
}
433
// Deprecated trace-pc-guard module registration: reports and exits.
ATTRIBUTE_INTERFACE
void __sanitizer_cov_trace_pc_guard_init(uint32_t *Start, uint32_t *Stop) {
  fuzzer::WarnAboutDeprecatedInstrumentation(
      "-fsanitize-coverage=trace-pc-guard");
}

// Module registration for -fsanitize-coverage=inline-8bit-counters.
ATTRIBUTE_INTERFACE
void __sanitizer_cov_8bit_counters_init(uint8_t *Start, uint8_t *Stop) {
  fuzzer::TPC.HandleInline8bitCountersInit(Start, Stop);
}
444
// Module registration for -fsanitize-coverage=pc-table.
ATTRIBUTE_INTERFACE
void __sanitizer_cov_pcs_init(const uintptr_t *pcs_beg,
                              const uintptr_t *pcs_end) {
  fuzzer::TPC.HandlePCsInit(pcs_beg, pcs_end);
}
450
// Indirect-call instrumentation: records the (caller, callee) edge.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCallerCallee(PC, Callee);
}
457
// Comparison instrumentation callbacks for 8/4/2/1-byte operands; each
// forwards the caller PC and both operands to TracePC::HandleCmp.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
// Now the __sanitizer_cov_trace_const_cmp[1248] callbacks just mimic
// the behaviour of __sanitizer_cov_trace_cmp[1248] ones. This, however,
// should be changed later to make full use of instrumentation.
void __sanitizer_cov_trace_const_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}
524
// Switch instrumentation. Cases[0] is the number of case values,
// Cases[1] is the operand width in bits, Cases[2..] are the sorted case
// values. The switch operand is compared against its two neighboring
// case values via HandleCmp.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
  uint64_t N = Cases[0];
  uint64_t ValSizeInBits = Cases[1];
  uint64_t *Vals = Cases + 2;
  // Skip the most common and the most boring case: all switch values are small.
  // We may want to skip this at compile-time, but it will make the
  // instrumentation less general.
  if (Vals[N - 1] < 256)
    return;
  // Also skip small input values, they won't give good signal.
  if (Val < 256)
    return;
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  size_t i;
  uint64_t Smaller = 0;
  uint64_t Larger = ~(uint64_t)0;
  // Find two switch values such that Smaller < Val < Larger.
  // Use 0 and 0xfff..f as the defaults.
  for (i = 0; i < N; i++) {
    if (Val < Vals[i]) {
      Larger = Vals[i];
      break;
    }
    if (Val > Vals[i]) Smaller = Vals[i];
  }

  // Apply HandleCmp to {Val,Smaller} and {Val, Larger},
  // use i as the PC modifier for HandleCmp.
  if (ValSizeInBits == 16) {
    fuzzer::TPC.HandleCmp(PC + 2 * i, static_cast<uint16_t>(Val),
                          (uint16_t)(Smaller));
    fuzzer::TPC.HandleCmp(PC + 2 * i + 1, static_cast<uint16_t>(Val),
                          (uint16_t)(Larger));
  } else if (ValSizeInBits == 32) {
    fuzzer::TPC.HandleCmp(PC + 2 * i, static_cast<uint32_t>(Val),
                          (uint32_t)(Smaller));
    fuzzer::TPC.HandleCmp(PC + 2 * i + 1, static_cast<uint32_t>(Val),
                          (uint32_t)(Larger));
  } else {
    fuzzer::TPC.HandleCmp(PC + 2*i, Val, Smaller);
    fuzzer::TPC.HandleCmp(PC + 2*i + 1, Val, Larger);
  }
}
571
// Division instrumentation: feed the divisor to HandleCmp against 0.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div4(uint32_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Val, (uint32_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div8(uint64_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Val, (uint64_t)0);
}

// GEP (array index) instrumentation: feed the index to HandleCmp against 0.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_gep(uintptr_t Idx) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(GET_CALLER_PC());
  fuzzer::TPC.HandleCmp(PC, Idx, (uintptr_t)0);
}
595
// Weak interceptor hooks for memory/string comparison functions. Each hook
// bails out unless the user callback is currently running (so libFuzzer's
// own comparisons produce no signal), then feeds the compared data into
// the trace-PC machinery.
ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memcmp(void *caller_pc, const void *s1,
                                  const void *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/false);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncmp(void *caller_pc, const char *s1,
                                   const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  // Limit n to the shorter of the two strings (either may be unterminated).
  size_t Len1 = fuzzer::InternalStrnlen(s1, n);
  size_t Len2 = fuzzer::InternalStrnlen(s2, n);
  n = std::min(n, Len1);
  n = std::min(n, Len2);
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcmp(void *caller_pc, const char *s1,
                                  const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  if (result == 0) return;  // No reason to mutate.
  size_t N = fuzzer::InternalStrnlen2(s1, s2);
  if (N <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, N, /*StopAtZero*/true);
}

// Case-insensitive variants reuse the case-sensitive hooks.
ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncasecmp(void *called_pc, const char *s1,
                                       const char *s2, size_t n, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strncmp(called_pc, s1, s2, n, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasecmp(void *called_pc, const char *s1,
                                      const char *s2, int result) {
  if (!fuzzer::RunningUserCallback) return;
  return __sanitizer_weak_hook_strcmp(called_pc, s1, s2, result);
}

// Substring-search hooks: remember the needle in the memmem table so the
// mutator can insert it into inputs.
ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strstr(void *called_pc, const char *s1,
                                  const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasestr(void *called_pc, const char *s1,
                                      const char *s2, char *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memmem(void *called_pc, const void *s1, size_t len1,
                                  const void *s2, size_t len2, void *result) {
  if (!fuzzer::RunningUserCallback) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), len2);
}
}  // extern "C"