directory. Defaults to 0. This flag can be used to minimize a corpus.
``-minimize_crash``
If 1, minimizes the provided crash input.
- Use with -runs=N or -max_total_time=N to limit the number attempts.
+ Use with -runs=N or -max_total_time=N to limit the number of attempts.
``-reload``
If set to 1 (the default), the corpus directory is re-read periodically to
check for new inputs; this allows detection of new inputs that were discovered
blocks are hit; defaults to 1.
``-use_value_profile``
Use `value profile`_ to guide corpus expansion; defaults to 0.
-``-use_traces``
- Use instruction traces (experimental, defaults to 0); see `Data-flow-guided fuzzing`_.
``-only_ascii``
If 1, generate only ASCII (``isprint``+``isspace``) inputs. Defaults to 0.
``-artifact_prefix``
First, the extra instrumentation may bring up to 2x additional slowdown.
Second, the corpus may grow by several times.
-
-Data-flow-guided fuzzing
-------------------------
-
-*EXPERIMENTAL*.
-With an additional compiler flag ``-fsanitize-coverage=trace-cmp`` (see SanitizerCoverageTraceDataFlow_)
-and extra run-time flag ``-use_traces=1`` the fuzzer will try to apply *data-flow-guided fuzzing*.
-That is, the fuzzer will record the inputs to comparison instructions, switch statements,
-and several libc functions (``memcmp``, ``strcmp``, ``strncmp``, etc).
-It will later use those recorded inputs during mutations.
-
-This mode can be combined with DataFlowSanitizer_ to achieve better sensitivity.
-
Fuzzer-friendly build mode
---------------------------
Sometimes the code under test is not fuzzing-friendly. Examples:
.. _AFL: http://lcamtuf.coredump.cx/afl/
.. _SanitizerCoverage: http://clang.llvm.org/docs/SanitizerCoverage.html
.. _SanitizerCoverageTraceDataFlow: http://clang.llvm.org/docs/SanitizerCoverage.html#tracing-data-flow
-.. _DataFlowSanitizer: http://clang.llvm.org/docs/DataFlowSanitizer.html
.. _AddressSanitizer: http://clang.llvm.org/docs/AddressSanitizer.html
.. _LeakSanitizer: http://clang.llvm.org/docs/LeakSanitizer.html
.. _Heartbleed: http://en.wikipedia.org/wiki/Heartbleed
};
// Declared as static globals for faster checks inside the hooks.
-static bool RecordingTraces = false;
static bool RecordingMemcmp = false;
static bool RecordingMemmem = false;
static bool RecordingValueProfile = false;
const uint8_t *DesiredData, size_t DataSize);
void StartTraceRecording() {
- if (!Options.UseTraces && !Options.UseMemcmp)
+ if (!Options.UseMemcmp)
return;
- RecordingTraces = Options.UseTraces;
RecordingMemcmp = Options.UseMemcmp;
RecordingMemmem = Options.UseMemmem;
NumMutations = 0;
}
void StopTraceRecording() {
- if (!RecordingTraces && !RecordingMemcmp)
+ if (!RecordingMemcmp)
return;
- RecordingTraces = false;
RecordingMemcmp = false;
for (size_t i = 0; i < NumMutations; i++) {
auto &M = Mutations[i];
uint64_t Arg1, uint64_t Arg2, dfsan_label L1,
dfsan_label L2) {
assert(ReallyHaveDFSan());
- if (!RecordingTraces || !F->InFuzzingThread()) return;
+ if (!F->InFuzzingThread()) return;
if (L1 == 0 && L2 == 0)
return; // Not actionable.
if (L1 != 0 && L2 != 0)
uint64_t Val, size_t NumCases,
uint64_t *Cases, dfsan_label L) {
assert(ReallyHaveDFSan());
- if (!RecordingTraces || !F->InFuzzingThread()) return;
+ if (!F->InFuzzingThread()) return;
if (!L) return; // Not actionable.
LabelRange LR = GetLabelRange(L);
size_t ValSize = ValSizeInBits / 8;
void TraceState::TraceCmpCallback(uintptr_t PC, size_t CmpSize, size_t CmpType,
uint64_t Arg1, uint64_t Arg2) {
- if (!RecordingTraces || !F->InFuzzingThread()) return;
+ if (!F->InFuzzingThread()) return;
if ((CmpType == ICMP_EQ || CmpType == ICMP_NE) && Arg1 == Arg2)
return; // No reason to mutate.
int Added = 0;
void TraceState::TraceSwitchCallback(uintptr_t PC, size_t ValSizeInBits,
uint64_t Val, size_t NumCases,
uint64_t *Cases) {
- if (!RecordingTraces || !F->InFuzzingThread()) return;
+ if (!F->InFuzzingThread()) return;
size_t ValSize = ValSizeInBits / 8;
bool TryShort = IsTwoByteData(Val);
for (size_t i = 0; i < NumCases; i++)
}
void Fuzzer::AssignTaintLabels(uint8_t *Data, size_t Size) {
- if (!Options.UseTraces && !Options.UseMemcmp) return;
+ if (!Options.UseMemcmp) return;
if (!ReallyHaveDFSan()) return;
TS->EnsureDfsanLabels(Size);
for (size_t i = 0; i < Size; i++)
}
void Fuzzer::InitializeTraceState() {
- if (!Options.UseTraces && !Options.UseMemcmp) return;
+ if (!Options.UseMemcmp) return;
TS = new TraceState(MD, Options, this);
}
} // namespace fuzzer
using fuzzer::TS;
-using fuzzer::RecordingTraces;
using fuzzer::RecordingMemcmp;
using fuzzer::RecordingValueProfile;
void __dfsw___sanitizer_cov_trace_cmp(uint64_t SizeAndType, uint64_t Arg1,
uint64_t Arg2, dfsan_label L0,
dfsan_label L1, dfsan_label L2) {
- if (!RecordingTraces) return;
- assert(L0 == 0);
- uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
- uint64_t CmpSize = (SizeAndType >> 32) / 8;
- uint64_t Type = (SizeAndType << 32) >> 32;
- TS->DFSanCmpCallback(PC, CmpSize, Type, Arg1, Arg2, L1, L2);
}
#define DFSAN_CMP_CALLBACK(N) \
void __dfsw___sanitizer_cov_trace_cmp##N(uint64_t Arg1, uint64_t Arg2, \
dfsan_label L1, dfsan_label L2) { \
- if (RecordingTraces) \
- TS->DFSanCmpCallback( \
- reinterpret_cast<uintptr_t>(__builtin_return_address(0)), N, \
- fuzzer::ICMP_EQ, Arg1, Arg2, L1, L2); \
}
DFSAN_CMP_CALLBACK(1)
void __dfsw___sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases,
dfsan_label L1, dfsan_label L2) {
- if (!RecordingTraces) return;
- uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
- TS->DFSanSwitchCallback(PC, Cases[1], Val, Cases[0], Cases+2, L1);
}
void dfsan_weak_hook_memcmp(void *caller_pc, const void *s1, const void *s2,
__attribute__((visibility("default")))
void __sanitizer_cov_trace_cmp(uint64_t SizeAndType, uint64_t Arg1,
uint64_t Arg2) {
- if (RecordingTraces) {
- uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
- uint64_t CmpSize = (SizeAndType >> 32) / 8;
- uint64_t Type = (SizeAndType << 32) >> 32;
- TS->TraceCmpCallback(PC, CmpSize, Type, Arg1, Arg2);
- }
if (RecordingValueProfile)
fuzzer::AddValueForCmp(__builtin_return_address(0), Arg1, Arg2);
}
-// Adding if(RecordingTraces){...} slows down the VP callbacks.
-// Once we prove that VP is as strong as traces, delete this.
-#define MAYBE_RECORD_TRACE(N) \
- if (RecordingTraces) { \
- uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0)); \
- TS->TraceCmpCallback(PC, N, fuzzer::ICMP_EQ, Arg1, Arg2); \
- }
-
__attribute__((visibility("default")))
void __sanitizer_cov_trace_cmp8(uint64_t Arg1, int64_t Arg2) {
fuzzer::AddValueForCmp(__builtin_return_address(0), Arg1, Arg2);
- MAYBE_RECORD_TRACE(8);
}
__attribute__((visibility("default")))
void __sanitizer_cov_trace_cmp4(uint32_t Arg1, int32_t Arg2) {
fuzzer::AddValueForCmp(__builtin_return_address(0), Arg1, Arg2);
- MAYBE_RECORD_TRACE(4);
}
__attribute__((visibility("default")))
void __sanitizer_cov_trace_cmp2(uint16_t Arg1, int16_t Arg2) {
fuzzer::AddValueForCmp(__builtin_return_address(0), Arg1, Arg2);
- MAYBE_RECORD_TRACE(2);
}
__attribute__((visibility("default")))
void __sanitizer_cov_trace_cmp1(uint8_t Arg1, int8_t Arg2) {
fuzzer::AddValueForCmp(__builtin_return_address(0), Arg1, Arg2);
- MAYBE_RECORD_TRACE(1);
}
__attribute__((visibility("default")))
void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
- if (!RecordingTraces) return;
- uintptr_t PC = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
- TS->TraceSwitchCallback(PC, Cases[1], Val, Cases[0], Cases + 2);
+ // TODO(kcc): support value profile here.
}
__attribute__((visibility("default")))