//===- FuzzerTracePC.h - Internal header for the Fuzzer ---------*- C++ -* ===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// fuzzer::TracePC
//===----------------------------------------------------------------------===//

#ifndef LLVM_FUZZER_TRACE_PC
#define LLVM_FUZZER_TRACE_PC

#include "FuzzerDefs.h"
#include "FuzzerDictionary.h"
#include "FuzzerValueBitMap.h"

#include <set>
#include <unordered_map>

namespace fuzzer {

// TableOfRecentCompares (TORC) remembers the most recently performed
// comparisons of type T.
// We record the arguments of CMP instructions in this table unconditionally
// because it seems cheaper this way than to compute some expensive
// conditions inside __sanitizer_cov_trace_cmp*.
// After the unit has been executed we may decide to use the contents of
// this table to populate a Dictionary.
template<class T, size_t kSizeT>
struct TableOfRecentCompares {
  static const size_t kSize = kSizeT;
  struct Pair {
    T A, B;
  };
  ATTRIBUTE_NO_SANITIZE_ALL
  void Insert(size_t Idx, const T &Arg1, const T &Arg2) {
    Idx = Idx % kSize;
    Table[Idx].A = Arg1;
    Table[Idx].B = Arg2;
  }

  Pair Get(size_t I) { return Table[I % kSize]; }

  Pair Table[kSize];
};
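
// Illustrative usage sketch (PCHash and the constant operands below are
// hypothetical, not part of this header): a __sanitizer_cov_trace_cmp4 hook
// could record both operands of a 4-byte comparison, and a later dictionary
// pass could read them back.
//
//   TableOfRecentCompares<uint32_t, 32> Torc;
//   Torc.Insert(/*Idx=*/PCHash, 0xDEADBEEF, 0x12345678);  // at a CMP site
//   auto P = Torc.Get(PCHash);  // P.A == 0xDEADBEEF, P.B == 0x12345678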

template <size_t kSizeT>
struct MemMemTable {
  static const size_t kSize = kSizeT;
  Word MemMemWords[kSize];
  Word EmptyWord;

  void Add(const uint8_t *Data, size_t Size) {
    if (Size <= 2) return;
    Size = std::min(Size, Word::GetMaxSize());
    size_t Idx = SimpleFastHash(Data, Size) % kSize;
    MemMemWords[Idx].Set(Data, Size);
  }
  const Word &Get(size_t Idx) {
    for (size_t i = 0; i < kSize; i++) {
      const Word &W = MemMemWords[(Idx + i) % kSize];
      if (W.size()) return W;
    }
    EmptyWord.Set(nullptr, 0);
    return EmptyWord;
  }
};
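
// Illustrative usage sketch (the needle below is hypothetical): the table is
// keyed by a hash of the stored bytes, and Get() probes linearly for the next
// non-empty slot.
//
//   MemMemTable<1024> Table;
//   const uint8_t Needle[] = {'P', 'A', 'S', 'S'};
//   Table.Add(Needle, sizeof(Needle));      // stored (Size > 2)
//   const Word &W = Table.Get(/*Idx=*/42);  // some stored Word, or an empty
//                                           // Word if the table is empty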

class TracePC {
 public:
  void HandleInline8bitCountersInit(uint8_t *Start, uint8_t *Stop);
  void HandlePCsInit(const uintptr_t *Start, const uintptr_t *Stop);
  void HandleCallerCallee(uintptr_t Caller, uintptr_t Callee);
  template <class T> void HandleCmp(uintptr_t PC, T Arg1, T Arg2);
  size_t GetTotalPCCoverage();
  void SetUseCounters(bool UC) { UseCounters = UC; }
  void SetUseValueProfileMask(uint32_t VPMask) { UseValueProfileMask = VPMask; }
  void SetPrintNewPCs(bool P) { DoPrintNewPCs = P; }
  void SetPrintNewFuncs(size_t P) { NumPrintNewFuncs = P; }
  void UpdateObservedPCs();
  template <class Callback> void CollectFeatures(Callback CB) const;

  void ResetMaps() {
    ValueProfileMap.Reset();
    ClearExtraCounters();
    ClearInlineCounters();
  }

  void ClearInlineCounters();

  void UpdateFeatureSet(size_t CurrentElementIdx, size_t CurrentElementSize);
  void PrintFeatureSet();

  void PrintModuleInfo();

  void PrintCoverage();

  template<class CallBack>
  void IterateCoveredFunctions(CallBack CB);

  void AddValueForMemcmp(void *caller_pc, const void *s1, const void *s2,
                         size_t n, bool StopAtZero);

  TableOfRecentCompares<uint32_t, 32> TORC4;
  TableOfRecentCompares<uint64_t, 32> TORC8;
  TableOfRecentCompares<Word, 32> TORCW;
  MemMemTable<1024> MMT;

  void RecordInitialStack();
  uintptr_t GetMaxStackOffset() const;

  template<class CallBack>
  void ForEachObservedPC(CallBack CB) {
    for (auto PC : ObservedPCs)
      CB(PC);
  }

  void SetFocusFunction(const std::string &FuncName);
  bool ObservedFocusFunction();

  void ProtectLazyCounters();
  bool UnprotectLazyCounters(void *CounterPtr);

  struct PCTableEntry {
    uintptr_t PC, PCFlags;
  };

  uintptr_t PCTableEntryIdx(const PCTableEntry *TE);
  const PCTableEntry *PCTableEntryByIdx(uintptr_t Idx);
  static uintptr_t GetNextInstructionPc(uintptr_t PC);
  bool PcIsFuncEntry(const PCTableEntry *TE) { return TE->PCFlags & 1; }

private:
  bool UseCounters = false;
  uint32_t UseValueProfileMask = 0;
  bool DoPrintNewPCs = false;
  size_t NumPrintNewFuncs = 0;

  // Module represents the array of 8-bit counters split into regions
  // such that every region, except maybe the first and the last one, is one
  // full page.
  struct Module {
    struct Region {
      uint8_t *Start, *Stop;
      bool Enabled;
      bool OneFullPage;
    };
    Region *Regions;
    size_t NumRegions;
    uint8_t *Start() { return Regions[0].Start; }
    uint8_t *Stop() { return Regions[NumRegions - 1].Stop; }
    size_t Size() { return Stop() - Start(); }
    size_t Idx(uint8_t *P) {
      assert(P >= Start() && P < Stop());
      return P - Start();
    }
  };

  Module Modules[4096];
  size_t NumModules;  // linker-initialized.
  size_t NumInline8bitCounters;

  template <class Callback>
  void IterateCounterRegions(Callback CB) {
    for (size_t m = 0; m < NumModules; m++)
      for (size_t r = 0; r < Modules[m].NumRegions; r++)
        CB(Modules[m].Regions[r]);
  }

  struct { const PCTableEntry *Start, *Stop; } ModulePCTable[4096];
  size_t NumPCTables;
  size_t NumPCsInPCTables;

  Set<const PCTableEntry*> ObservedPCs;
  std::unordered_map<uintptr_t, uintptr_t> ObservedFuncs;  // PC => Counter.

  uint8_t *FocusFunctionCounterPtr = nullptr;

  ValueBitMap ValueProfileMap;
  uintptr_t InitialStack;
};
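
// Illustrative sketch of how a client could consume TracePC (RunOneInput and
// FeatureSet are hypothetical names, not part of this header):
//
//   fuzzer::TPC.ResetMaps();
//   RunOneInput(Data, Size);             // execute one unit under test
//   std::set<size_t> FeatureSet;
//   fuzzer::TPC.CollectFeatures([&](size_t Feature) {
//     FeatureSet.insert(Feature);        // record every observed feature
//   });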

template <class Callback>
// void Callback(size_t FirstFeature, size_t Idx, uint8_t Value);
ATTRIBUTE_NO_SANITIZE_ALL
size_t ForEachNonZeroByte(const uint8_t *Begin, const uint8_t *End,
                          size_t FirstFeature, Callback Handle8bitCounter) {
  typedef uintptr_t LargeType;
  const size_t Step = sizeof(LargeType) / sizeof(uint8_t);
  const size_t StepMask = Step - 1;
  auto P = Begin;
  // Iterate by 1 byte until either the alignment boundary or the end.
  for (; reinterpret_cast<uintptr_t>(P) & StepMask && P < End; P++)
    if (uint8_t V = *P)
      Handle8bitCounter(FirstFeature, P - Begin, V);

  // Iterate by Step bytes at a time.
  for (; P < End; P += Step)
    if (LargeType Bundle = *reinterpret_cast<const LargeType *>(P))
      for (size_t I = 0; I < Step; I++, Bundle >>= 8)
        if (uint8_t V = Bundle & 0xff)
          Handle8bitCounter(FirstFeature, P - Begin + I, V);

  // Iterate by 1 byte until the end.
  for (; P < End; P++)
    if (uint8_t V = *P)
      Handle8bitCounter(FirstFeature, P - Begin, V);
  return End - Begin;
}
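
// Illustrative sketch (the Counters buffer is hypothetical; a little-endian
// host is assumed so that the byte-extraction order of the word-sized loop
// above matches memory offsets):
//
//   alignas(sizeof(uintptr_t)) uint8_t Counters[8] = {0, 3, 0, 0, 1, 0, 0, 0};
//   ForEachNonZeroByte(Counters, Counters + 8, /*FirstFeature=*/0,
//                      [](size_t FirstFeature, size_t Idx, uint8_t V) {
//                        // Invoked as (0, 1, 3) and (0, 4, 1).
//                      });
//   // Returns 8, the number of bytes scanned.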

// Given a non-zero Counter returns a number in the range [0,7].
template<class T>
unsigned CounterToFeature(T Counter) {
  // Returns a feature number by placing Counters into buckets as illustrated
  // below.
  //
  // Counter bucket: [1] [2] [3] [4-7] [8-15] [16-31] [32-127] [128+]
  // Feature number:  0   1   2    3      4       5       6       7
  //
  // This is a heuristic taken from AFL (see
  // http://lcamtuf.coredump.cx/afl/technical_details.txt).
  //
  // This implementation may change in the future so clients should
  // not rely on it.
  assert(Counter);
  unsigned Bit = 0;
  /**/ if (Counter >= 128) Bit = 7;
  else if (Counter >= 32) Bit = 6;
  else if (Counter >= 16) Bit = 5;
  else if (Counter >= 8) Bit = 4;
  else if (Counter >= 4) Bit = 3;
  else if (Counter >= 3) Bit = 2;
  else if (Counter >= 2) Bit = 1;
  return Bit;
}
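
// A few worked examples of the bucketing above:
//   CounterToFeature(1) == 0, CounterToFeature(3) == 2,
//   CounterToFeature(20) == 5, CounterToFeature(255) == 7.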

template <class Callback>  // void Callback(size_t Feature)
ATTRIBUTE_NO_SANITIZE_ADDRESS
ATTRIBUTE_NOINLINE
void TracePC::CollectFeatures(Callback HandleFeature) const {
  auto Handle8bitCounter = [&](size_t FirstFeature,
                               size_t Idx, uint8_t Counter) {
    if (UseCounters)
      HandleFeature(FirstFeature + Idx * 8 + CounterToFeature(Counter));
    else
      HandleFeature(FirstFeature + Idx);
  };

  size_t FirstFeature = 0;

  for (size_t i = 0; i < NumModules; i++) {
    for (size_t r = 0; r < Modules[i].NumRegions; r++) {
      if (!Modules[i].Regions[r].Enabled) continue;
      FirstFeature += 8 * ForEachNonZeroByte(Modules[i].Regions[r].Start,
                                             Modules[i].Regions[r].Stop,
                                             FirstFeature, Handle8bitCounter);
    }
  }

  FirstFeature +=
      8 * ForEachNonZeroByte(ExtraCountersBegin(), ExtraCountersEnd(),
                             FirstFeature, Handle8bitCounter);

  if (UseValueProfileMask) {
    ValueProfileMap.ForEach([&](size_t Idx) {
      HandleFeature(FirstFeature + Idx);
    });
    FirstFeature += ValueProfileMap.SizeInBits();
  }

  // Step function, grows similar to 8 * Log_2(A).
  auto StackDepthStepFunction = [](uint32_t A) -> uint32_t {
    if (!A) return A;
    uint32_t Log2 = Log(A);
    if (Log2 < 3) return A;
    Log2 -= 3;
    return (Log2 + 1) * 8 + ((A >> Log2) & 7);
  };
  assert(StackDepthStepFunction(1024) == 64);
  assert(StackDepthStepFunction(1024 * 4) == 80);
  assert(StackDepthStepFunction(1024 * 1024) == 144);

  if (auto MaxStackOffset = GetMaxStackOffset())
    HandleFeature(FirstFeature + StackDepthStepFunction(MaxStackOffset / 8));
}
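
// Sketch of the feature index layout produced by CollectFeatures above
// (derived from the code; exact sizes depend on the instrumented binary):
//   [0, 8 * NumInline8bitCounters)     module 8-bit counters (8 slots each)
//   next 8 * (extra counter bytes)     libFuzzer's extra counters
//   next ValueProfileMap.SizeInBits()  value-profile features, if enabled
//   finally one feature                derived from the maximum stack depth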

extern TracePC TPC;

}  // namespace fuzzer

#endif  // LLVM_FUZZER_TRACE_PC