//===- FuzzerTracePC.cpp - PC tracing--------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// Trace PCs.
// This module implements __sanitizer_cov_trace_pc_guard[_init],
// the callback required for -fsanitize-coverage=trace-pc-guard instrumentation.
//
//===----------------------------------------------------------------------===//
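
// Illustrative only: a typical build that exercises the callbacks defined in
// this file (exact flags vary across compiler versions) looks roughly like
//   clang++ -g -fsanitize=address,fuzzer \
//           -fsanitize-coverage=trace-cmp,trace-gep,trace-div target.cpp
// where -fsanitize=fuzzer enables the coverage instrumentation and links in
// libFuzzer.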

#include "FuzzerTracePC.h"
#include "FuzzerCorpus.h"
#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerExtFunctions.h"
#include "FuzzerIO.h"
#include "FuzzerUtil.h"
#include "FuzzerValueBitMap.h"
#include <set>

// The coverage counters and PCs.
// These are declared as global variables named "__sancov_*" to simplify
// experiments with inlined instrumentation.
alignas(64) ATTRIBUTE_INTERFACE
uint8_t __sancov_trace_pc_guard_8bit_counters[fuzzer::TracePC::kNumPCs];

ATTRIBUTE_INTERFACE
uintptr_t __sancov_trace_pc_pcs[fuzzer::TracePC::kNumPCs];

// Used by -fsanitize-coverage=stack-depth to track stack depth
ATTRIBUTE_INTERFACE thread_local uintptr_t __sancov_lowest_stack;

namespace fuzzer {

TracePC TPC;

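// Non-zero while libFuzzer is doing its own mem/str operations; the weak
// memcmp/strcmp/strstr hooks at the bottom of this file check this flag and
// return early, so the fuzzer's internal comparisons do not pollute the
// value profile or the mutation dictionaries.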
int ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr;

uint8_t *TracePC::Counters() const {
  return __sancov_trace_pc_guard_8bit_counters;
}

uintptr_t *TracePC::PCs() const {
  return __sancov_trace_pc_pcs;
}

size_t TracePC::GetTotalPCCoverage() {
  if (ObservedPCs.size())
    return ObservedPCs.size();
  size_t Res = 0;
  for (size_t i = 1, N = GetNumPCs(); i < N; i++)
    if (PCs()[i])
      Res++;
  return Res;
}


void TracePC::HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop) {
  if (Start == Stop) return;
  if (NumModulesWithInline8bitCounters &&
      ModuleCounters[NumModulesWithInline8bitCounters-1].Start == Start) return;
  assert(NumModulesWithInline8bitCounters <
         sizeof(ModuleCounters) / sizeof(ModuleCounters[0]));
  ModuleCounters[NumModulesWithInline8bitCounters++] = {Start, Stop};
  NumInline8bitCounters += Stop - Start;
}

void TracePC::HandlePCsInit(const uint8_t *Start, const uint8_t *Stop) {
  const uintptr_t *B = reinterpret_cast<const uintptr_t *>(Start);
  const uintptr_t *E = reinterpret_cast<const uintptr_t *>(Stop);
  if (NumPCTables && ModulePCTable[NumPCTables - 1].Start == B) return;
  assert(NumPCTables < sizeof(ModulePCTable) / sizeof(ModulePCTable[0]));
  ModulePCTable[NumPCTables++] = {B, E};
  NumPCsInPCTables += E - B;
}

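// Registers a module's trace-pc-guard region.  Each guard cell is assigned a
// sequential index (modulo kNumPCs) that __sanitizer_cov_trace_pc_guard later
// uses to index __sancov_trace_pc_pcs and the 8-bit counter array.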
void TracePC::HandleInit(uint32_t *Start, uint32_t *Stop) {
  if (Start == Stop || *Start) return;
  assert(NumModules < sizeof(Modules) / sizeof(Modules[0]));
  for (uint32_t *P = Start; P < Stop; P++) {
    NumGuards++;
    if (NumGuards == kNumPCs) {
      RawPrint(
          "WARNING: The binary has too many instrumented PCs.\n"
          "         You may want to reduce the size of the binary\n"
          "         for more efficient fuzzing and precise coverage data\n");
    }
    *P = NumGuards % kNumPCs;
  }
  Modules[NumModules].Start = Start;
  Modules[NumModules].Stop = Stop;
  NumModules++;
}

void TracePC::PrintModuleInfo() {
  if (NumGuards) {
    Printf("INFO: Loaded %zd modules (%zd guards): ", NumModules, NumGuards);
    for (size_t i = 0; i < NumModules; i++)
      Printf("%zd [%p, %p), ", Modules[i].Stop - Modules[i].Start,
             Modules[i].Start, Modules[i].Stop);
    Printf("\n");
  }
  if (NumModulesWithInline8bitCounters) {
    Printf("INFO: Loaded %zd modules (%zd inline 8-bit counters): ",
           NumModulesWithInline8bitCounters, NumInline8bitCounters);
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++)
      Printf("%zd [%p, %p), ", ModuleCounters[i].Stop - ModuleCounters[i].Start,
             ModuleCounters[i].Start, ModuleCounters[i].Stop);
    Printf("\n");
  }
  if (NumPCTables) {
    Printf("INFO: Loaded %zd PC tables (%zd PCs): ", NumPCTables,
           NumPCsInPCTables);
    for (size_t i = 0; i < NumPCTables; i++) {
      Printf("%zd [%p,%p), ", ModulePCTable[i].Stop - ModulePCTable[i].Start,
             ModulePCTable[i].Start, ModulePCTable[i].Stop);
    }
    Printf("\n");

    if ((NumGuards && NumGuards != NumPCsInPCTables) ||
        (NumInline8bitCounters && NumInline8bitCounters != NumPCsInPCTables)) {
      Printf("ERROR: The size of coverage PC tables does not match the"
             " number of instrumented PCs. This might be a bug in the compiler,"
             " please contact the libFuzzer developers.\n");
      _Exit(1);
    }
  }
  if (size_t NumClangCounters = ClangCountersEnd() - ClangCountersBegin())
    Printf("INFO: %zd Clang Coverage Counters\n", NumClangCounters);
}

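// Records an indirect call as a (caller, callee) pair: the low 12 bits of
// each PC are packed into a 24-bit index into the value profile map, so a
// previously unseen caller/callee combination counts as new feedback.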
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCallerCallee(uintptr_t Caller, uintptr_t Callee) {
  const uintptr_t kBits = 12;
  const uintptr_t kMask = (1 << kBits) - 1;
  uintptr_t Idx = (Caller & kMask) | ((Callee & kMask) << kBits);
  ValueProfileMap.AddValueModPrime(Idx);
}

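// Scans whichever coverage arrays are active (inline 8-bit counters or
// trace-pc-guard counters) against the PC tables and records every PC whose
// counter is non-zero; newly observed PCs are printed when -print_pcs=1.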
void TracePC::UpdateObservedPCs() {
  auto Observe = [&](uintptr_t PC) {
    bool Inserted = ObservedPCs.insert(PC).second;
    if (Inserted && DoPrintNewPCs)
      PrintPC("\tNEW_PC: %p %F %L\n", "\tNEW_PC: %p\n", PC + 1);
  };
  if (NumPCsInPCTables) {
    if (NumInline8bitCounters == NumPCsInPCTables) {
      for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
        uint8_t *Beg = ModuleCounters[i].Start;
        size_t Size = ModuleCounters[i].Stop - Beg;
        assert(Size ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t j = 0; j < Size; j++)
          if (Beg[j])
            Observe(ModulePCTable[i].Start[j]);
      }
    } else if (NumGuards == NumPCsInPCTables) {
      size_t GuardIdx = 1;
      for (size_t i = 0; i < NumModules; i++) {
        uint32_t *Beg = Modules[i].Start;
        size_t Size = Modules[i].Stop - Beg;
        assert(Size ==
               (size_t)(ModulePCTable[i].Stop - ModulePCTable[i].Start));
        for (size_t j = 0; j < Size; j++, GuardIdx++)
          if (Counters()[GuardIdx])
            Observe(ModulePCTable[i].Start[j]);
      }
    }
  }
  if (size_t NumClangCounters =
          ClangCountersEnd() - ClangCountersBegin()) {
    auto P = ClangCountersBegin();
    for (size_t Idx = 0; Idx < NumClangCounters; Idx++)
      if (P[Idx])
        Observe((uintptr_t)Idx);
  }
}

inline ALWAYS_INLINE uintptr_t GetPreviousInstructionPc(uintptr_t PC) {
  // TODO: this implementation is x86 only.
  // see sanitizer_common GetPreviousInstructionPc for full implementation.
  return PC - 1;
}

inline ALWAYS_INLINE uintptr_t GetNextInstructionPc(uintptr_t PC) {
  // TODO: this implementation is x86 only.
  // see sanitizer_common GetNextInstructionPc for full implementation.
  return PC + 1;
}

static std::string GetModuleName(uintptr_t PC) {
  char ModulePathRaw[4096] = "";  // What's PATH_MAX in portable C++?
  void *OffsetRaw = nullptr;
  if (!EF->__sanitizer_get_module_and_offset_for_pc(
          reinterpret_cast<void *>(PC), ModulePathRaw,
          sizeof(ModulePathRaw), &OffsetRaw))
    return "";
  return ModulePathRaw;
}

void TracePC::PrintCoverage() {
  if (!EF->__sanitizer_symbolize_pc ||
      !EF->__sanitizer_get_module_and_offset_for_pc) {
    Printf("INFO: __sanitizer_symbolize_pc or "
           "__sanitizer_get_module_and_offset_for_pc is not available,"
           " not printing coverage\n");
    return;
  }
  Printf("COVERAGE:\n");
  std::string LastFunctionName = "";
  std::string LastFileStr = "";
  std::set<size_t> UncoveredLines;
  std::set<size_t> CoveredLines;

  auto FunctionEndCallback = [&](const std::string &CurrentFunc,
                                 const std::string &CurrentFile) {
    if (LastFunctionName != CurrentFunc) {
      if (CoveredLines.empty() && !UncoveredLines.empty()) {
        Printf("UNCOVERED_FUNC: %s\n", LastFunctionName.c_str());
      } else {
        for (auto Line : UncoveredLines) {
          if (!CoveredLines.count(Line))
            Printf("UNCOVERED_LINE: %s %s:%zd\n", LastFunctionName.c_str(),
                   LastFileStr.c_str(), Line);
        }
      }

      UncoveredLines.clear();
      CoveredLines.clear();
      LastFunctionName = CurrentFunc;
      LastFileStr = CurrentFile;
    }
  };

  for (size_t i = 0; i < NumPCTables; i++) {
    auto &M = ModulePCTable[i];
    assert(M.Start < M.Stop);
    auto ModuleName = GetModuleName(*M.Start);
    for (auto Ptr = M.Start; Ptr < M.Stop; Ptr++) {
      auto PC = *Ptr;
      auto VisualizePC = GetNextInstructionPc(PC);
      bool IsObserved = ObservedPCs.count(PC);
      std::string FileStr = DescribePC("%s", VisualizePC);
      if (!IsInterestingCoverageFile(FileStr)) continue;
      std::string FunctionStr = DescribePC("%F", VisualizePC);
      FunctionEndCallback(FunctionStr, FileStr);
      std::string LineStr = DescribePC("%l", VisualizePC);
      size_t Line = std::stoul(LineStr);
      if (IsObserved && CoveredLines.insert(Line).second)
        Printf("COVERED: %s %s:%zd\n", FunctionStr.c_str(), FileStr.c_str(),
               Line);
      else
        UncoveredLines.insert(Line);
    }
  }
  FunctionEndCallback("", "");
}

void TracePC::DumpCoverage() {
  if (EF->__sanitizer_dump_coverage) {
    std::vector<uintptr_t> PCsCopy(GetNumPCs());
    for (size_t i = 0; i < GetNumPCs(); i++)
      PCsCopy[i] = PCs()[i] ? GetPreviousInstructionPc(PCs()[i]) : 0;
    EF->__sanitizer_dump_coverage(PCsCopy.data(), PCsCopy.size());
  }
}

// Value profile.
// We keep track of various values that affect control flow.
// These values are inserted into a bit-set-based hash map.
// Every new bit in the map is treated as new coverage.
//
// For memcmp/strcmp/etc the interesting value is the length of the common
// prefix of the parameters.
// For cmp instructions the interesting value is an XOR of the parameters.
// The interesting value is mixed up with the PC and is then added to the map.

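// Records the length of the common prefix of two memcmp/strcmp arguments.
// Illustrative example: for s1 = "foobar" and s2 = "fooqux" the common-prefix
// length I is 3, so Idx = (caller_pc & 4095) | (3 << 12); the copied operands
// are also stored in the table of recent compares (TORCW) so that later
// mutations can reuse them.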
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                                size_t n, bool StopAtZero) {
  if (!n) return;
  size_t Len = std::min(n, Word::GetMaxSize());
  const uint8_t *A1 = reinterpret_cast<const uint8_t *>(s1);
  const uint8_t *A2 = reinterpret_cast<const uint8_t *>(s2);
  uint8_t B1[Word::kMaxSize];
  uint8_t B2[Word::kMaxSize];
  // Copy the data into locals in this non-msan-instrumented function
  // to avoid msan complaining further.
  size_t Hash = 0;  // Compute some simple hash of both strings.
  for (size_t i = 0; i < Len; i++) {
    B1[i] = A1[i];
    B2[i] = A2[i];
    size_t T = B1[i];
    Hash ^= (T << 8) | B2[i];
  }
  size_t I = 0;
  for (; I < Len; I++)
    if (B1[I] != B2[I] || (StopAtZero && B1[I] == 0))
      break;
  size_t PC = reinterpret_cast<size_t>(caller_pc);
  size_t Idx = (PC & 4095) | (I << 12);
  ValueProfileMap.AddValue(Idx);
  TORCW.Insert(Idx ^ Hash, Word(B1, Len), Word(B2, Len));
}

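// Records a comparison for value profiling.  The feature index mixes the
// caller PC with the hamming distance of the operands; e.g. (illustrative
// values) Arg1 = 0xFF00 and Arg2 = 0xF000 give ArgXor = 0x0F00, popcount = 4,
// ArgDistance = 5, and Idx = ((PC & 4095) + 1) * 5.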
template <class T>
ATTRIBUTE_TARGET_POPCNT ALWAYS_INLINE
ATTRIBUTE_NO_SANITIZE_ALL
void TracePC::HandleCmp(uintptr_t PC, T Arg1, T Arg2) {
  uint64_t ArgXor = Arg1 ^ Arg2;
  uint64_t ArgDistance = __builtin_popcountll(ArgXor) + 1; // [1,65]
  uintptr_t Idx = ((PC & 4095) + 1) * ArgDistance;
  if (sizeof(T) == 4)
    TORC4.Insert(ArgXor, Arg1, Arg2);
  else if (sizeof(T) == 8)
    TORC8.Insert(ArgXor, Arg1, Arg2);
  ValueProfileMap.AddValue(Idx);
}

static size_t InternalStrnlen(const char *S, size_t MaxLen) {
  size_t Len = 0;
  for (; Len < MaxLen && S[Len]; Len++) {}
  return Len;
}

// Finds min of (strlen(S1), strlen(S2)).
// Needed because one of these strings may actually not be zero-terminated.
static size_t InternalStrnlen2(const char *S1, const char *S2) {
  size_t Len = 0;
  for (; S1[Len] && S2[Len]; Len++) {}
  return Len;
}

void TracePC::ClearInlineCounters() {
  for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
    uint8_t *Beg = ModuleCounters[i].Start;
    size_t Size = ModuleCounters[i].Stop - Beg;
    memset(Beg, 0, Size);
  }
}

void TracePC::RecordInitialStack() {
  InitialStack = __sancov_lowest_stack;
}

uintptr_t TracePC::GetMaxStackOffset() const {
  return InitialStack - __sancov_lowest_stack;  // Stack grows down
}

} // namespace fuzzer

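// The extern "C" callbacks below are the entry points emitted by
// -fsanitize-coverage instrumentation; they either update the global
// __sancov_* arrays directly or forward to the fuzzer::TPC singleton.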
extern "C" {
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  uint32_t Idx = *Guard;
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

// Best-effort support for -fsanitize-coverage=trace-pc, which is available
// in both Clang and GCC.
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc() {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  uintptr_t Idx = PC & (((uintptr_t)1 << fuzzer::TracePC::kTracePcBits) - 1);
  __sancov_trace_pc_pcs[Idx] = PC;
  __sancov_trace_pc_guard_8bit_counters[Idx]++;
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_trace_pc_guard_init(uint32_t *Start, uint32_t *Stop) {
  fuzzer::TPC.HandleInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_8bit_counters_init(uint8_t *Start, uint8_t *Stop) {
  fuzzer::TPC.HandleInline8bitCountersInit(Start, Stop);
}

ATTRIBUTE_INTERFACE
void __sanitizer_cov_pcs_init(const uint8_t *pcs_beg, const uint8_t *pcs_end) {
  fuzzer::TPC.HandlePCsInit(pcs_beg, pcs_end);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCallerCallee(PC, Callee);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
// For now, the __sanitizer_cov_trace_const_cmp[1248] callbacks just mimic
// the behaviour of the __sanitizer_cov_trace_cmp[1248] ones. This, however,
// should be changed later to make full use of the instrumentation.
void __sanitizer_cov_trace_const_cmp8(uint64_t Arg1, uint64_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp4(uint32_t Arg1, uint32_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp2(uint16_t Arg1, uint16_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_const_cmp1(uint8_t Arg1, uint8_t Arg2) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Arg1, Arg2);
}

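// Layout of the Cases array: Cases[0] is the number of case constants,
// Cases[1] is the size of the switch operand in bits, and Cases[2..] are the
// case constants themselves (the code below relies on them being sorted in
// ascending order).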
ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
  uint64_t N = Cases[0];
  uint64_t ValSizeInBits = Cases[1];
  uint64_t *Vals = Cases + 2;
  // Skip the most common and the most boring case.
  if (Vals[N - 1] < 256 && Val < 256)
    return;
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  size_t i;
  uint64_t Token = 0;
  for (i = 0; i < N; i++) {
    Token = Val ^ Vals[i];
    if (Val < Vals[i])
      break;
  }

  if (ValSizeInBits == 16)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint16_t>(Token), (uint16_t)(0));
  else if (ValSizeInBits == 32)
    fuzzer::TPC.HandleCmp(PC + i, static_cast<uint32_t>(Token), (uint32_t)(0));
  else
    fuzzer::TPC.HandleCmp(PC + i, Token, (uint64_t)(0));
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div4(uint32_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Val, (uint32_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_div8(uint64_t Val) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Val, (uint64_t)0);
}

ATTRIBUTE_INTERFACE
ATTRIBUTE_NO_SANITIZE_ALL
ATTRIBUTE_TARGET_POPCNT
void __sanitizer_cov_trace_gep(uintptr_t Idx) {
  uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
  fuzzer::TPC.HandleCmp(PC, Idx, (uintptr_t)0);
}

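// Weak hooks invoked by the sanitizer interceptors for memcmp/strcmp/strstr
// and friends.  They feed the compared data into the value profile and the
// mutation dictionaries, and bail out while libFuzzer performs its own
// mem/str operations (see ScopedDoingMyOwnMemOrStr above).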
ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memcmp(void *caller_pc, const void *s1,
                                  const void *s2, size_t n, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  if (result == 0) return;  // No reason to mutate.
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/false);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncmp(void *caller_pc, const char *s1,
                                   const char *s2, size_t n, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  if (result == 0) return;  // No reason to mutate.
  size_t Len1 = fuzzer::InternalStrnlen(s1, n);
  size_t Len2 = fuzzer::InternalStrnlen(s2, n);
  n = std::min(n, Len1);
  n = std::min(n, Len2);
  if (n <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, n, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcmp(void *caller_pc, const char *s1,
                                  const char *s2, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  if (result == 0) return;  // No reason to mutate.
  size_t N = fuzzer::InternalStrnlen2(s1, s2);
  if (N <= 1) return;  // Not interesting.
  fuzzer::TPC.AddValueForMemcmp(caller_pc, s1, s2, N, /*StopAtZero*/true);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strncasecmp(void *called_pc, const char *s1,
                                       const char *s2, size_t n, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  return __sanitizer_weak_hook_strncmp(called_pc, s1, s2, n, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasecmp(void *called_pc, const char *s1,
                                      const char *s2, int result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  return __sanitizer_weak_hook_strcmp(called_pc, s1, s2, result);
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strstr(void *called_pc, const char *s1,
                                  const char *s2, char *result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_strcasestr(void *called_pc, const char *s1,
                                      const char *s2, char *result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), strlen(s2));
}

ATTRIBUTE_INTERFACE ATTRIBUTE_NO_SANITIZE_MEMORY
void __sanitizer_weak_hook_memmem(void *called_pc, const void *s1, size_t len1,
                                  const void *s2, size_t len2, void *result) {
  if (fuzzer::ScopedDoingMyOwnMemOrStr::DoingMyOwnMemOrStr) return;
  fuzzer::TPC.MMT.Add(reinterpret_cast<const uint8_t *>(s2), len2);
}
}  // extern "C"