//===- FuzzerTracePC.h - Internal header for the Fuzzer ---------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// fuzzer::TracePC
//===----------------------------------------------------------------------===//

#ifndef LLVM_FUZZER_TRACE_PC
#define LLVM_FUZZER_TRACE_PC

#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerValueBitMap.h"

#include <set>
#include <unordered_map>

namespace fuzzer {

// TableOfRecentCompares (TORC) remembers the most recently performed
// comparisons of type T.
// We record the arguments of CMP instructions in this table unconditionally
// because it seems cheaper to do so than to compute some expensive
// conditions inside __sanitizer_cov_trace_cmp*.
// After the unit has been executed we may decide to use the contents of
// this table to populate a Dictionary.
template<class T, size_t kSizeT>
struct TableOfRecentCompares {
  static const size_t kSize = kSizeT;
  struct Pair {
    T A, B;
  };
  ATTRIBUTE_NO_SANITIZE_ALL
  void Insert(size_t Idx, const T &Arg1, const T &Arg2) {
    Idx = Idx % kSize;
    Table[Idx].A = Arg1;
    Table[Idx].B = Arg2;
  }

  Pair Get(size_t I) { return Table[I % kSize]; }

  Pair Table[kSize];
};
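
// Illustrative use (a sketch, not a contract): the comparison hooks call
// something like TORC8.Insert(Idx, Arg1, Arg2) for every observed CMP, and
// the mutator may later call TORC8.Get(Rand) to pull a recorded pair back
// out and turn it into dictionary entries.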

template <size_t kSizeT>
struct MemMemTable {
  static const size_t kSize = kSizeT;
  Word MemMemWords[kSize];
  Word EmptyWord;

  void Add(const uint8_t *Data, size_t Size) {
    if (Size <= 2) return;
    Size = std::min(Size, Word::GetMaxSize());
    size_t Idx = SimpleFastHash(Data, Size) % kSize;
    MemMemWords[Idx].Set(Data, Size);
  }
  const Word &Get(size_t Idx) {
    for (size_t i = 0; i < kSize; i++) {
      const Word &W = MemMemWords[(Idx + i) % kSize];
      if (W.size()) return W;
    }
    EmptyWord.Set(nullptr, 0);
    return EmptyWord;
  }
};
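
// Illustrative use (a sketch): the memcmp/strstr-style hooks call MMT.Add()
// with the byte sequences they observe, and the mutator may later call
// MMT.Get() with a random index to recover one of the recorded words as a
// dictionary candidate.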

class TracePC {
 public:
  static const size_t kNumPCs = 1 << 21;
  // How many bits of PC are used from __sanitizer_cov_trace_pc.
  static const size_t kTracePcBits = 18;

  void HandleInit(uint32_t *Start, uint32_t *Stop);
  void HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop);
  void HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop);
  void HandleCallerCallee(uintptr_t Caller, uintptr_t Callee);
  template <class T> void HandleCmp(uintptr_t PC, T Arg1, T Arg2);
  size_t GetTotalPCCoverage();
  void SetUseCounters(bool UC) { UseCounters = UC; }
  void SetUseValueProfileMask(uint32_t VPMask) { UseValueProfileMask = VPMask; }
  void SetPrintNewPCs(bool P) { DoPrintNewPCs = P; }
  void SetPrintNewFuncs(size_t P) { NumPrintNewFuncs = P; }
  void UpdateObservedPCs();
  template <class Callback> void CollectFeatures(Callback CB) const;

  void ResetMaps() {
    ValueProfileMap.Reset();
    if (NumModules)
      memset(Counters(), 0, GetNumPCs());
    ClearExtraCounters();
    ClearInlineCounters();
  }

  void ClearInlineCounters();

  void UpdateFeatureSet(size_t CurrentElementIdx, size_t CurrentElementSize);
  void PrintFeatureSet();

  void PrintModuleInfo();

  void PrintCoverage();
  void DumpCoverage();
  void PrintUnstableStats();

  template<class CallBack>
  void IterateCoveredFunctions(CallBack CB);

  void AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                         size_t n, bool StopAtZero);

  TableOfRecentCompares<uint32_t, 32> TORC4;
  TableOfRecentCompares<uint64_t, 32> TORC8;
  TableOfRecentCompares<Word, 32> TORCW;
  MemMemTable<1024> MMT;

  size_t GetNumPCs() const {
    return NumGuards == 0 ? (1 << kTracePcBits) : Min(kNumPCs, NumGuards + 1);
  }
  uintptr_t GetPC(size_t Idx) {
    assert(Idx < GetNumPCs());
    return PCs()[Idx];
  }

  void RecordInitialStack();
  uintptr_t GetMaxStackOffset() const;

  template<class CallBack>
  void ForEachObservedPC(CallBack CB) {
    for (auto PC : ObservedPCs)
      CB(PC);
  }

  void SetFocusFunction(const std::string &FuncName);
  bool ObservedFocusFunction();

  void InitializeUnstableCounters();
  void UpdateUnstableCounters();

private:
  // Value used to represent an unstable edge.
  static constexpr int16_t kUnstableCounter = -1;

  // A 16-bit signed type can hold any possible value of a uint8_t counter as
  // well as the -1 (kUnstableCounter) sentinel.
  int16_t UnstableCounters[kNumPCs];

  bool UseCounters = false;
  uint32_t UseValueProfileMask = 0;
  bool DoPrintNewPCs = false;
  size_t NumPrintNewFuncs = 0;

  struct Module {
    uint32_t *Start, *Stop;
  };

  Module Modules[4096];
  size_t NumModules;  // linker-initialized.
  size_t NumGuards;   // linker-initialized.

  struct { uint8_t *Start, *Stop; } ModuleCounters[4096];
  size_t NumModulesWithInline8bitCounters;  // linker-initialized.
  size_t NumInline8bitCounters;

  struct PCTableEntry {
    uintptr_t PC, PCFlags;
  };

  struct { const PCTableEntry *Start, *Stop; } ModulePCTable[4096];
  size_t NumPCTables;
  size_t NumPCsInPCTables;

  uint8_t *Counters() const;
  uintptr_t *PCs() const;

  Set<uintptr_t> ObservedPCs;
  std::unordered_map<uintptr_t, uintptr_t> ObservedFuncs;  // PC => Counter.

  template <class Callback>
  void IterateInline8bitCounters(Callback CB) const;

  std::pair<size_t, size_t> FocusFunction = {-1, -1};  // Module and PC IDs.

  ValueBitMap ValueProfileMap;
  uintptr_t InitialStack;
};

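// Calls Handle8bitCounter(FirstFeature, Idx, Value) for every non-zero byte
// in [Begin, End). The middle loop reads the buffer one word at a time so
// that long runs of zero counters can be skipped with a single comparison.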
template <class Callback>
// void Callback(size_t FirstFeature, size_t Idx, uint8_t Value);
ATTRIBUTE_NO_SANITIZE_ALL
void ForEachNonZeroByte(const uint8_t *Begin, const uint8_t *End,
                        size_t FirstFeature, Callback Handle8bitCounter) {
  typedef uintptr_t LargeType;
  const size_t Step = sizeof(LargeType) / sizeof(uint8_t);
  const size_t StepMask = Step - 1;
  auto P = Begin;
  // Iterate by 1 byte until either the alignment boundary or the end.
  for (; reinterpret_cast<uintptr_t>(P) & StepMask && P < End; P++)
    if (uint8_t V = *P)
      Handle8bitCounter(FirstFeature, P - Begin, V);

  // Iterate by Step bytes at a time.
  for (; P < End; P += Step)
    if (LargeType Bundle = *reinterpret_cast<const LargeType *>(P))
      for (size_t I = 0; I < Step; I++, Bundle >>= 8)
        if (uint8_t V = Bundle & 0xff)
          Handle8bitCounter(FirstFeature, P - Begin + I, V);

  // Iterate by 1 byte until the end.
  for (; P < End; P++)
    if (uint8_t V = *P)
      Handle8bitCounter(FirstFeature, P - Begin, V);
}

// Given a non-zero Counter, returns a number in the range [0,7].
template<class T>
unsigned CounterToFeature(T Counter) {
  // Returns a feature number by placing Counters into buckets as illustrated
  // below.
  //
  // Counter bucket: [1] [2] [3] [4-7] [8-15] [16-31] [32-127] [128+]
  // Feature number:  0   1   2    3     4      5       6        7
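  //
  // E.g., Counter == 20 falls into the [16-31] bucket, so the feature is 5.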
  //
  // This is a heuristic taken from AFL (see
  // http://lcamtuf.coredump.cx/afl/technical_details.txt).
  //
  // This implementation may change in the future so clients should
  // not rely on it.
  assert(Counter);
  unsigned Bit = 0;
  /**/ if (Counter >= 128) Bit = 7;
  else if (Counter >= 32) Bit = 6;
  else if (Counter >= 16) Bit = 5;
  else if (Counter >= 8) Bit = 4;
  else if (Counter >= 4) Bit = 3;
  else if (Counter >= 3) Bit = 2;
  else if (Counter >= 2) Bit = 1;
  return Bit;
}

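// Walks all coverage signals collected for the current unit (8-bit counters,
// either guard-based or inline, the extra counters, the value profile map,
// and the maximum stack depth) and invokes HandleFeature once per feature.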
template <class Callback>  // void Callback(size_t Feature)
ATTRIBUTE_NO_SANITIZE_ADDRESS
__attribute__((noinline))
void TracePC::CollectFeatures(Callback HandleFeature) const {
  uint8_t *Counters = this->Counters();
  size_t N = GetNumPCs();
  auto Handle8bitCounter = [&](size_t FirstFeature,
                               size_t Idx, uint8_t Counter) {
    if (UseCounters)
      HandleFeature(FirstFeature + Idx * 8 + CounterToFeature(Counter));
    else
      HandleFeature(FirstFeature + Idx);
  };

  size_t FirstFeature = 0;

  if (!NumInline8bitCounters) {
    ForEachNonZeroByte(Counters, Counters + N, FirstFeature, Handle8bitCounter);
    FirstFeature += N * 8;
  }

  if (NumInline8bitCounters) {
    for (size_t i = 0; i < NumModulesWithInline8bitCounters; i++) {
      ForEachNonZeroByte(ModuleCounters[i].Start, ModuleCounters[i].Stop,
                         FirstFeature, Handle8bitCounter);
      FirstFeature += 8 * (ModuleCounters[i].Stop - ModuleCounters[i].Start);
    }
  }

  ForEachNonZeroByte(ExtraCountersBegin(), ExtraCountersEnd(), FirstFeature,
                     Handle8bitCounter);
  FirstFeature += (ExtraCountersEnd() - ExtraCountersBegin()) * 8;

  if (UseValueProfileMask) {
    ValueProfileMap.ForEach([&](size_t Idx) {
      HandleFeature(FirstFeature + Idx);
    });
    FirstFeature += ValueProfileMap.SizeInBits();
  }

  // Step function, grows similarly to 8 * Log_2(A).
  auto StackDepthStepFunction = [](uint32_t A) -> uint32_t {
    if (!A) return A;
    uint32_t Log2 = Log(A);
    if (Log2 < 3) return A;
    Log2 -= 3;
    return (Log2 + 1) * 8 + ((A >> Log2) & 7);
  };
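  // Worked example: for A == 1024, Log(A) is the integer base-2 logarithm, 10,
  // so Log2 becomes 7 and the result is (7 + 1) * 8 + ((1024 >> 7) & 7) == 64,
  // matching the first assert below.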
  assert(StackDepthStepFunction(1024) == 64);
  assert(StackDepthStepFunction(1024 * 4) == 80);
  assert(StackDepthStepFunction(1024 * 1024) == 144);

  if (auto MaxStackOffset = GetMaxStackOffset())
    HandleFeature(FirstFeature + StackDepthStepFunction(MaxStackOffset / 8));
}

extern TracePC TPC;

}  // namespace fuzzer

#endif  // LLVM_FUZZER_TRACE_PC