[libFuzzer] prototype implementation of recursion-depth coverage features (commented out; real implementation needs to use inlined instrumentation)

llvm-svn: 308577
This commit is contained in:
Kostya Serebryany 2017-07-20 01:35:17 +00:00
parent c20b3383b7
commit e55828c740
5 changed files with 30 additions and 4 deletions

View File

@ -456,6 +456,7 @@ static bool LooseMemeq(const uint8_t *A, const uint8_t *B, size_t Size) {
}
void Fuzzer::ExecuteCallback(const uint8_t *Data, size_t Size) {
TPC.RecordInitialStack();
TotalNumberOfRuns++;
assert(InFuzzingThread());
if (SMR.IsClient())

View File

@ -319,6 +319,8 @@ void __sanitizer_cov_trace_pc_guard(uint32_t *Guard) {
uint32_t Idx = *Guard;
__sancov_trace_pc_pcs[Idx] = PC;
__sancov_trace_pc_guard_8bit_counters[Idx]++;
// Uncomment the following line to get stack-depth profiling.
// fuzzer::TPC.RecordCurrentStack();
}
// Best-effort support for -fsanitize-coverage=trace-pc, which is available

View File

@ -115,6 +115,20 @@ class TracePC {
return PCs()[Idx];
}
// Samples the current stack position and remembers the lowest (deepest)
// address seen so far. Stacks are assumed to grow down (see the comment on
// InitialStack/LowestStack), so a smaller address means deeper recursion.
void RecordCurrentStack() {
uintptr_t Stack = GetCurrentStack();
if (Stack < LowestStack)
LowestStack = Stack;
}
// Records the stack position at the start of a run as the baseline for
// GetMaxStackOffset(), and resets LowestStack to that same baseline so the
// per-run deepest-stack measurement starts fresh.
void RecordInitialStack() {
InitialStack = GetCurrentStack();
LowestStack = InitialStack;
}
// Returns the current frame address as an integer — a cheap proxy for the
// stack pointer, obtained via the __builtin_frame_address(0) compiler builtin.
uintptr_t GetCurrentStack() const {
return reinterpret_cast<uintptr_t>(__builtin_frame_address(0));
}
uintptr_t GetMaxStackOffset() const { return InitialStack - LowestStack; }
private:
bool UseCounters = false;
bool UseValueProfile = false;
@ -138,6 +152,7 @@ private:
std::set<uintptr_t> *PrintedPCs;
ValueBitMap ValueProfileMap;
uintptr_t InitialStack, LowestStack; // Assume stack grows down.
};
template <class Callback> // void Callback(size_t Idx, uint8_t Value);
@ -196,11 +211,17 @@ void TracePC::CollectFeatures(Callback HandleFeature) const {
ForEachNonZeroByte(ExtraCountersBegin(), ExtraCountersEnd(), FirstFeature,
Handle8bitCounter);
FirstFeature += (ExtraCountersEnd() - ExtraCountersBegin()) * 8;
if (UseValueProfile)
if (UseValueProfile) {
ValueProfileMap.ForEach([&](size_t Idx) {
HandleFeature(N * 8 + Idx);
HandleFeature(FirstFeature + Idx);
});
FirstFeature += ValueProfileMap.SizeInBits();
}
if (auto MaxStackOffset = GetMaxStackOffset())
HandleFeature(FirstFeature + MaxStackOffset);
}
extern TracePC TPC;

View File

@ -52,6 +52,8 @@ struct ValueBitMap {
return Map[WordIdx] & (1UL << BitIdx);
}
size_t SizeInBits() const { return kMapSizeInBits; }
size_t GetNumBitsSinceLastMerge() const { return NumBits; }
// Merges 'Other' into 'this', clears 'Other', updates NumBits,

View File

@ -3,7 +3,7 @@
// Simple test for a fuzzer. The fuzzer must find the deep recursion.
// To generate a crashy input:
// for((i=0;i<100;i++)); do echo -n ABCDEFGHIJKLMNOPQRSTUVWXYZ >> INPUT; done
// for((i=0;i<110;i++)); do echo -n ABCDEFGHIJ >> INPUT; done
#include <cstddef>
#include <cstdint>
#include <cstdlib>
@ -13,7 +13,7 @@ static volatile int Sink;
// Fuzz-target helper: recurses one level per input byte while each byte
// matches the expected pattern character, and calls abort() — the "crash"
// the fuzzer must discover — once recursion depth exceeds 1000.
// NOTE: the two consecutive `if (*Data == ...)` lines below are the
// before/after pair from this commit's diff (pattern period 26 -> 10);
// only the `% 10` version is the post-commit code.
void Recursive(const uint8_t *Data, size_t Size, int Depth) {
if (Depth > 1000) abort();
if (!Size) return;
if (*Data == ('A' + Depth % 26))
if (*Data == ('A' + Depth % 10))
Recursive(Data + 1, Size - 1, Depth + 1);
Sink++;
}