std::string CFPath = CFPathOrNull ? CFPathOrNull : TempPath(".txt");
Vector<std::string> NewFiles;
- Set<uint32_t> NewFeatures;
+ Set<uint32_t> NewFeatures, NewCov;
CrashResistantMerge(Args, OldCorpus, NewCorpus, &NewFiles, {}, &NewFeatures,
- CFPath, true);
+ {}, &NewCov, CFPath, true);
for (auto &Path : NewFiles)
F->WriteToOutputCorpus(FileToVector(Path, Options.MaxLen));
// We are done, delete the control file if it was a temporary one.
Vector<std::string> CorpusDirs;
std::string MainCorpusDir;
std::string TempDir;
- Set<uint32_t> Features;
+ Set<uint32_t> Features, Cov;
Vector<std::string> Files;
Random *Rand;
int Verbosity = 0;
GetSizedFilesFromDir(Job->CorpusDir, &TempFiles);
Vector<std::string> FilesToAdd;
- Set<uint32_t> NewFeatures;
+ Set<uint32_t> NewFeatures, NewCov;
CrashResistantMerge(Args, {}, TempFiles, &FilesToAdd, Features,
- &NewFeatures, Job->CFPath, false);
+ &NewFeatures, Cov, &NewCov, Job->CFPath, false);
RemoveFile(Job->CFPath);
for (auto &Path : FilesToAdd) {
auto U = FileToVector(Path);
}
RmDirRecursive(Job->CorpusDir);
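+    // Fold this job's newly discovered features and coverage into the global sets.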
Features.insert(NewFeatures.begin(), NewFeatures.end());
+ Cov.insert(NewCov.begin(), NewCov.end());
auto Stats = ParseFinalStatsFromLog(Job->LogPath);
NumRuns += Stats.number_of_executed_units;
if (!FilesToAdd.empty())
- Printf("#%zd: ft: %zd corp: %zd exec/s %zd\n", NumRuns,
- Features.size(), Files.size(),
+ Printf("#%zd: cov: %zd ft: %zd corp: %zd exec/s %zd\n", NumRuns,
+ Cov.size(), Features.size(), Files.size(),
Stats.average_exec_per_sec);
}
};
auto CFPath = DirPlusFile(Env.TempDir, "merge.txt");
CrashResistantMerge(Env.Args, {}, SeedFiles, &Env.Files, {}, &Env.Features,
+ {}, &Env.Cov,
CFPath, false);
RemoveFile(CFPath);
Printf("INFO: -fork=%d: %zd seeds, starting to fuzz; scratch: %s\n",
const size_t kInvalidStartMarker = -1;
size_t LastSeenStartMarker = kInvalidStartMarker;
Vector<uint32_t> TmpFeatures;
- Set<uintptr_t> PCs;
+ Set<uint32_t> PCs;
while (std::getline(IS, Line, '\n')) {
std::istringstream ISS1(Line);
std::string Marker;
Files[CurrentFileIdx].Features = TmpFeatures;
}
} else if (Marker == "COV") {
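+      // COV FILE_ID PC_INDEX1 PC_INDEX2 ...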
+ size_t CurrentFileIdx = N;
if (ParseCoverage)
while (ISS1 >> std::hex >> N)
if (PCs.insert(N).second)
- NumCoveredPCs++;
+ Files[CurrentFileIdx].Cov.push_back(N);
} else {
return false;
}
// Decides which files need to be merged (add those to NewFiles).
-// Returns the number of new features added.
+// Fills *NewFeatures and *NewCov with the newly added features and coverage.
-size_t Merger::Merge(const Set<uint32_t> &InitialFeatures,
- Set<uint32_t> *NewFeatures,
- Vector<std::string> *NewFiles) {
+void Merger::Merge(const Set<uint32_t> &InitialFeatures,
+ Set<uint32_t> *NewFeatures, const Set<uint32_t> &InitialCov,
+ Set<uint32_t> *NewCov, Vector<std::string> *NewFiles) {
NewFiles->clear();
assert(NumFilesInFirstCorpus <= Files.size());
Set<uint32_t> AllFeatures = InitialFeatures;
auto &Cur = Files[i].Features;
AllFeatures.insert(Cur.begin(), Cur.end());
}
- size_t InitialNumFeatures = AllFeatures.size();
-
// Remove all features that we already know from all other inputs.
for (size_t i = NumFilesInFirstCorpus; i < Files.size(); i++) {
auto &Cur = Files[i].Features;
}
if (FoundNewFeatures)
NewFiles->push_back(Files[i].Name);
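+    // Collect coverage new relative to InitialCov from every file, kept or not.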
+ for (auto Cov : Files[i].Cov)
+ if (InitialCov.find(Cov) == InitialCov.end())
+ NewCov->insert(Cov);
}
- return AllFeatures.size() - InitialNumFeatures;
}
Set<uint32_t> Merger::AllFeatures() const {
for (size_t F : UniqFeatures)
OF << " " << std::hex << F;
OF << "\n";
- OF << "COV " << i;
+ OF << "COV " << std::dec << i;
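+    // Append only the PC table indices not already reported for an earlier file.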
TPC.ForEachObservedPC([&](const TracePC::PCTableEntry *TE) {
if (AllPCs.insert(TE).second)
OF << " " << TPC.PCTableEntryIdx(TE);
const Vector<SizedFile> &NewCorpus,
Vector<std::string> *NewFiles,
const Set<uint32_t> &InitialFeatures,
- Set<uint32_t> *NewFeatures, const std::string &CFPath,
+ Set<uint32_t> *NewFeatures,
+ const Set<uint32_t> &InitialCov,
+ Set<uint32_t> *NewCov,
+ const std::string &CFPath,
bool V /*Verbose*/) {
if (NewCorpus.empty() && OldCorpus.empty()) return; // Nothing to merge.
size_t NumAttempts = 0;
VPrintf(V,
"MERGE-OUTER: consumed %zdMb (%zdMb rss) to parse the control file\n",
M.ApproximateMemoryConsumption() >> 20, GetPeakRSSMb());
- size_t NumNewFeatures = M.Merge(InitialFeatures, NewFeatures, NewFiles);
- VPrintf(V, "MERGE-OUTER: %zd new files with %zd new features added\n",
- NewFiles->size(), NumNewFeatures);
+ M.Merge(InitialFeatures, NewFeatures, InitialCov, NewCov, NewFiles);
+ VPrintf(V, "MERGE-OUTER: %zd new files with %zd new features added; "
+ "%zd new coverage edges\n",
+ NewFiles->size(), NewFeatures->size(), NewCov->size());
}
} // namespace fuzzer
struct MergeFileInfo {
std::string Name;
size_t Size = 0;
- Vector<uint32_t> Features;
+ Vector<uint32_t> Features, Cov;
};
struct Merger {
Vector<MergeFileInfo> Files;
- size_t NumCoveredPCs = 0;
size_t NumFilesInFirstCorpus = 0;
size_t FirstNotProcessedFile = 0;
std::string LastFailure;
bool Parse(std::istream &IS, bool ParseCoverage);
bool Parse(const std::string &Str, bool ParseCoverage);
void ParseOrExit(std::istream &IS, bool ParseCoverage);
- size_t Merge(const Set<uint32_t> &InitialFeatures,
- Set<uint32_t> *NewFeatures,
- Vector<std::string> *NewFiles);
+ void Merge(const Set<uint32_t> &InitialFeatures, Set<uint32_t> *NewFeatures,
+ const Set<uint32_t> &InitialCov, Set<uint32_t> *NewCov,
+ Vector<std::string> *NewFiles);
size_t ApproximateMemoryConsumption() const;
Set<uint32_t> AllFeatures() const;
};
Vector<std::string> *NewFiles,
const Set<uint32_t> &InitialFeatures,
Set<uint32_t> *NewFeatures,
+ const Set<uint32_t> &InitialCov,
+ Set<uint32_t> *NewCov,
const std::string &CFPath,
bool Verbose);