//===- FuzzerMerge.cpp - merging corpora ----------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// Merging corpora.
//===----------------------------------------------------------------------===//

#include "FuzzerCommand.h"
#include "FuzzerMerge.h"
#include "FuzzerIO.h"
#include "FuzzerInternal.h"
#include "FuzzerTracePC.h"
#include "FuzzerUtil.h"

#include <algorithm> // std::sort, std::set_difference
#include <fstream>
#include <iterator>
#include <set>
#include <sstream>
#include <unordered_set>

namespace fuzzer {

bool Merger::Parse(const std::string &Str, bool ParseCoverage) {
  std::istringstream SS(Str);
  return Parse(SS, ParseCoverage);
}

void Merger::ParseOrExit(std::istream &IS, bool ParseCoverage) {
  if (!Parse(IS, ParseCoverage)) {
    Printf("MERGE: failed to parse the control file (unexpected error)\n");
    exit(1);
  }
}

// The control file example:
//
// 3 # The number of inputs
// 1 # The number of inputs in the first corpus, <= the previous number
// file0
// file1
// file2  # One file name per line.
// STARTED 0 123  # FileID, file size
// FT 0 1 4 6 8   # FileID COV1 COV2 ...
// COV 0 7 8 9    # FileID COV1 COV2 ...
// STARTED 1 456  # If FT is missing, the input crashed while processing.
// STARTED 2 567
// FT 2 8 9
// COV 2 11 12
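//
// The header (the two counts and the file names) is produced by
// WriteNewControlFile() below; the STARTED/FT/COV records are appended one
// input at a time by CrashResistantMergeInternalStep(). FT lists the features
// observed while executing the input, COV lists the indices of newly observed
// PC-table entries.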
bool Merger::Parse(std::istream &IS, bool ParseCoverage) {
  LastFailure.clear();
  std::string Line;

  // Parse NumFiles.
  if (!std::getline(IS, Line, '\n')) return false;
  std::istringstream L1(Line);
  size_t NumFiles = 0;
  L1 >> NumFiles;
  if (NumFiles == 0 || NumFiles > 10000000) return false;

  // Parse NumFilesInFirstCorpus.
  if (!std::getline(IS, Line, '\n')) return false;
  std::istringstream L2(Line);
  NumFilesInFirstCorpus = NumFiles + 1;
  L2 >> NumFilesInFirstCorpus;
  if (NumFilesInFirstCorpus > NumFiles) return false;

  // Parse file names.
  Files.resize(NumFiles);
  for (size_t i = 0; i < NumFiles; i++)
    if (!std::getline(IS, Files[i].Name, '\n'))
      return false;

  // Parse STARTED, FT, and COV lines.
  size_t ExpectedStartMarker = 0;
  const size_t kInvalidStartMarker = -1;
  size_t LastSeenStartMarker = kInvalidStartMarker;
  Vector<uint32_t> TmpFeatures;
  Set<uint32_t> PCs;
  while (std::getline(IS, Line, '\n')) {
    std::istringstream ISS1(Line);
    std::string Marker;
    size_t N;
    ISS1 >> Marker;
    ISS1 >> N;
    if (Marker == "STARTED") {
      // STARTED FILE_ID FILE_SIZE
      if (ExpectedStartMarker != N)
        return false;
      ISS1 >> Files[ExpectedStartMarker].Size;
      LastSeenStartMarker = ExpectedStartMarker;
      assert(ExpectedStartMarker < Files.size());
      ExpectedStartMarker++;
    } else if (Marker == "FT") {
      // FT FILE_ID COV1 COV2 COV3 ...
      size_t CurrentFileIdx = N;
      if (CurrentFileIdx != LastSeenStartMarker)
        return false;
      LastSeenStartMarker = kInvalidStartMarker;
      if (ParseCoverage) {
        TmpFeatures.clear();  // use a vector from outer scope to avoid resizes.
        while (ISS1 >> N)
          TmpFeatures.push_back(N);
        std::sort(TmpFeatures.begin(), TmpFeatures.end());
        Files[CurrentFileIdx].Features = TmpFeatures;
      }
    } else if (Marker == "COV") {
      size_t CurrentFileIdx = N;
      if (ParseCoverage)
        while (ISS1 >> N)
          if (PCs.insert(N).second)
            Files[CurrentFileIdx].Cov.push_back(N);
    } else {
      return false;
    }
  }
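  // A STARTED record without a matching FT record means the target crashed
  // while processing that input; remember it so that the next merge attempt
  // can skip it.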
  if (LastSeenStartMarker != kInvalidStartMarker)
    LastFailure = Files[LastSeenStartMarker].Name;

  FirstNotProcessedFile = ExpectedStartMarker;
  return true;
}

size_t Merger::ApproximateMemoryConsumption() const {
  size_t Res = 0;
  for (const auto &F: Files)
    Res += sizeof(F) + F.Features.size() * sizeof(F.Features[0]);
  return Res;
}

// Decides which files need to be merged (add those to NewFiles).
// Returns the number of new features added.
size_t Merger::Merge(const Set<uint32_t> &InitialFeatures,
                     Set<uint32_t> *NewFeatures,
                     const Set<uint32_t> &InitialCov, Set<uint32_t> *NewCov,
                     Vector<std::string> *NewFiles) {
  NewFiles->clear();
  assert(NumFilesInFirstCorpus <= Files.size());
  Set<uint32_t> AllFeatures = InitialFeatures;

  // What features are in the initial corpus?
  for (size_t i = 0; i < NumFilesInFirstCorpus; i++) {
    auto &Cur = Files[i].Features;
    AllFeatures.insert(Cur.begin(), Cur.end());
  }
  // Remove all features that we already know from all other inputs.
  for (size_t i = NumFilesInFirstCorpus; i < Files.size(); i++) {
    auto &Cur = Files[i].Features;
    Vector<uint32_t> Tmp;
    std::set_difference(Cur.begin(), Cur.end(), AllFeatures.begin(),
                        AllFeatures.end(), std::inserter(Tmp, Tmp.begin()));
    Cur.swap(Tmp);
  }

  // Sort. Give preference to
  //   * smaller files
  //   * files with more features.
  std::sort(Files.begin() + NumFilesInFirstCorpus, Files.end(),
            [&](const MergeFileInfo &a, const MergeFileInfo &b) -> bool {
              if (a.Size != b.Size)
                return a.Size < b.Size;
              return a.Features.size() > b.Features.size();
            });
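
  // Because of this ordering, when several inputs cover the same features the
  // smallest one is visited first and is therefore the one kept by the greedy
  // pass below.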
  // One greedy pass: add the file's features to AllFeatures.
  // If new features were added, add this file to NewFiles.
  for (size_t i = NumFilesInFirstCorpus; i < Files.size(); i++) {
    auto &Cur = Files[i].Features;
    // Printf("%s -> sz %zd ft %zd\n", Files[i].Name.c_str(),
    //        Files[i].Size, Cur.size());
    bool FoundNewFeatures = false;
    for (auto Fe: Cur) {
      if (AllFeatures.insert(Fe).second) {
        FoundNewFeatures = true;
        NewFeatures->insert(Fe);
      }
    }
    if (FoundNewFeatures)
      NewFiles->push_back(Files[i].Name);
    for (auto Cov : Files[i].Cov)
      if (InitialCov.find(Cov) == InitialCov.end())
        NewCov->insert(Cov);
  }
  return NewFeatures->size();
}

Set<uint32_t> Merger::AllFeatures() const {
  Set<uint32_t> S;
  for (auto &File : Files)
    S.insert(File.Features.begin(), File.Features.end());
  return S;
}

// Inner process. May crash if the target crashes.
void Fuzzer::CrashResistantMergeInternalStep(const std::string &CFPath) {
  Printf("MERGE-INNER: using the control file '%s'\n", CFPath.c_str());
  Merger M;
  std::ifstream IF(CFPath);
  M.ParseOrExit(IF, false);
  IF.close();
  if (!M.LastFailure.empty())
    Printf("MERGE-INNER: '%s' caused a failure at the previous merge step\n",
           M.LastFailure.c_str());

  Printf("MERGE-INNER: %zd total files;"
         " %zd processed earlier; will process %zd files now\n",
         M.Files.size(), M.FirstNotProcessedFile,
         M.Files.size() - M.FirstNotProcessedFile);

  std::ofstream OF(CFPath, std::ofstream::out | std::ofstream::app);
  Set<size_t> AllFeatures;
  auto PrintStatsWrapper = [this, &AllFeatures](const char* Where) {
    this->PrintStats(Where, "\n", 0, AllFeatures.size());
  };
  Set<const TracePC::PCTableEntry *> AllPCs;
  for (size_t i = M.FirstNotProcessedFile; i < M.Files.size(); i++) {
    Fuzzer::MaybeExitGracefully();
    auto U = FileToVector(M.Files[i].Name);
    if (U.size() > MaxInputLen) {
      U.resize(MaxInputLen);
      U.shrink_to_fit();
    }

    // Write the pre-run marker.
    OF << "STARTED " << i << " " << U.size() << "\n";
    OF.flush();  // Flush is important since Command::Execute may crash.
    // Run.
    TPC.ResetMaps();
    if (ExecuteCallback(U.data(), U.size()) > 0) {
      continue;
    }
    // Collect coverage. We are iterating over the files in this order:
    // * First, files in the initial corpus ordered by size, smallest first.
    // * Then, all other files, smallest first.
    // So it makes no sense to record all features for all files, instead we
    // only record features that were not seen before.
    Set<size_t> UniqFeatures;
    TPC.CollectFeatures([&](size_t Feature) {
      if (AllFeatures.insert(Feature).second)
        UniqFeatures.insert(Feature);
    });
    TPC.UpdateObservedPCs();
    // Show stats.
    if (!(TotalNumberOfRuns & (TotalNumberOfRuns - 1)))  // Power of two.
      PrintStatsWrapper("pulse ");
    if (TotalNumberOfRuns == M.NumFilesInFirstCorpus)
      PrintStatsWrapper("LOADED");
    // Write the post-run marker and the coverage.
    OF << "FT " << i;
    for (size_t F : UniqFeatures)
      OF << " " << F;
    OF << "\n";
    OF << "COV " << i;
    TPC.ForEachObservedPC([&](const TracePC::PCTableEntry *TE) {
      if (AllPCs.insert(TE).second)
        OF << " " << TPC.PCTableEntryIdx(TE);
    });
    OF << "\n";
    OF.flush();
  }
  PrintStatsWrapper("DONE ");
}
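
// Writes a fresh control file that lists OldCorpus followed by NewCorpus,
// skipping any file already processed in a previous, completed merge
// (KnownFiles). Those files keep the features/coverage recorded in the old
// control file and are re-attached by CrashResistantMerge() before the final
// Merger::Merge() call, so they do not need to be re-executed.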
static size_t WriteNewControlFile(const std::string &CFPath,
                                  const Vector<SizedFile> &OldCorpus,
                                  const Vector<SizedFile> &NewCorpus,
                                  const Vector<MergeFileInfo> &KnownFiles) {
  std::unordered_set<std::string> FilesToSkip;
  for (auto &SF: KnownFiles)
    FilesToSkip.insert(SF.Name);

  Vector<std::string> FilesToUse;
  auto MaybeUseFile = [=, &FilesToUse](std::string Name) {
    if (FilesToSkip.find(Name) == FilesToSkip.end())
      FilesToUse.push_back(Name);
  };
  for (auto &SF: OldCorpus)
    MaybeUseFile(SF.File);
  auto FilesToUseFromOldCorpus = FilesToUse.size();
  for (auto &SF: NewCorpus)
    MaybeUseFile(SF.File);

  RemoveFile(CFPath);
  std::ofstream ControlFile(CFPath);
  ControlFile << FilesToUse.size() << "\n";
  ControlFile << FilesToUseFromOldCorpus << "\n";
  for (auto &FN: FilesToUse)
    ControlFile << FN << "\n";

  if (!ControlFile) {
    Printf("MERGE-OUTER: failed to write to the control file: %s\n",
           CFPath.c_str());
    exit(1);
  }

  return FilesToUse.size();
}
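
// CrashResistantMerge drives the whole merge: it creates or resumes the
// control file, repeatedly re-executes this binary with -merge_inner=1 until
// every listed input has been processed (restarting after any crash), and
// finally parses the completed control file and calls Merger::Merge().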
// Outer process. Does not call the target code and thus should not fail.
void CrashResistantMerge(const Vector<std::string> &Args,
                         const Vector<SizedFile> &OldCorpus,
                         const Vector<SizedFile> &NewCorpus,
                         Vector<std::string> *NewFiles,
                         const Set<uint32_t> &InitialFeatures,
                         Set<uint32_t> *NewFeatures,
                         const Set<uint32_t> &InitialCov,
                         Set<uint32_t> *NewCov,
                         const std::string &CFPath,
                         bool V /*Verbose*/) {
  if (NewCorpus.empty() && OldCorpus.empty()) return;  // Nothing to merge.
  size_t NumAttempts = 0;
  Vector<MergeFileInfo> KnownFiles;
  if (FileSize(CFPath)) {
    VPrintf(V, "MERGE-OUTER: non-empty control file provided: '%s'\n",
            CFPath.c_str());
    Merger M;
    std::ifstream IF(CFPath);
    if (M.Parse(IF, /*ParseCoverage=*/true)) {
      VPrintf(V, "MERGE-OUTER: control file ok, %zd files total,"
              " first not processed file %zd\n",
              M.Files.size(), M.FirstNotProcessedFile);
      if (!M.LastFailure.empty())
        VPrintf(V, "MERGE-OUTER: '%s' will be skipped as unlucky "
                "(merge has stumbled on it the last time)\n",
                M.LastFailure.c_str());
      if (M.FirstNotProcessedFile >= M.Files.size()) {
        // Merge has already been completed with the given merge control file.
        if (M.Files.size() == OldCorpus.size() + NewCorpus.size()) {
          VPrintf(
              V,
              "MERGE-OUTER: nothing to do, merge has been completed before\n");
          exit(0);
        }

        // Number of input files likely changed, start merge from scratch, but
        // reuse coverage information from the given merge control file.
        VPrintf(
            V,
            "MERGE-OUTER: starting merge from scratch, but reusing coverage "
            "information from the given control file\n");
        KnownFiles = M.Files;
      } else {
        // There is a merge in progress, continue.
        NumAttempts = M.Files.size() - M.FirstNotProcessedFile;
      }
    } else {
      VPrintf(V, "MERGE-OUTER: bad control file, will overwrite it\n");
    }
  }

  if (!NumAttempts) {
    // The supplied control file is empty or bad, create a fresh one.
    VPrintf(V, "MERGE-OUTER: "
            "%zd files, %zd in the initial corpus, %zd processed earlier\n",
            OldCorpus.size() + NewCorpus.size(), OldCorpus.size(),
            KnownFiles.size());
    NumAttempts = WriteNewControlFile(CFPath, OldCorpus, NewCorpus, KnownFiles);
  }

  // Execute the inner process until it passes.
  // Every inner process should execute at least one input.
  Command BaseCmd(Args);
  BaseCmd.removeFlag("merge");
  BaseCmd.removeFlag("fork");
  BaseCmd.removeFlag("collect_data_flow");
  for (size_t Attempt = 1; Attempt <= NumAttempts; Attempt++) {
    Fuzzer::MaybeExitGracefully();
    VPrintf(V, "MERGE-OUTER: attempt %zd\n", Attempt);
    Command Cmd(BaseCmd);
    Cmd.addFlag("merge_control_file", CFPath);
    Cmd.addFlag("merge_inner", "1");
    if (!V) {
      Cmd.setOutputFile(getDevNull());
      Cmd.combineOutAndErr();
    }
    auto ExitCode = ExecuteCommand(Cmd);
    if (!ExitCode) {
      VPrintf(V, "MERGE-OUTER: successful in %zd attempt(s)\n", Attempt);
      break;
    }
  }
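
  // Even if some attempts failed (i.e. the target kept crashing), proceed and
  // merge whatever coverage information the control file has accumulated.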
  // Read the control file and do the merge.
  Merger M;
  std::ifstream IF(CFPath);
  IF.seekg(0, IF.end);
  VPrintf(V, "MERGE-OUTER: the control file has %zd bytes\n",
          (size_t)IF.tellg());
  IF.seekg(0, IF.beg);
  M.ParseOrExit(IF, true);
  IF.close();
  VPrintf(V,
          "MERGE-OUTER: consumed %zdMb (%zdMb rss) to parse the control file\n",
          M.ApproximateMemoryConsumption() >> 20, GetPeakRSSMb());

  M.Files.insert(M.Files.end(), KnownFiles.begin(), KnownFiles.end());
  M.Merge(InitialFeatures, NewFeatures, InitialCov, NewCov, NewFiles);
  VPrintf(V, "MERGE-OUTER: %zd new files with %zd new features added; "
          "%zd new coverage edges\n",
          NewFiles->size(), NewFeatures->size(), NewCov->size());
}

} // namespace fuzzer