Index: compiler-rt/lib/fuzzer/FuzzerInternal.h
===================================================================
--- compiler-rt/lib/fuzzer/FuzzerInternal.h
+++ compiler-rt/lib/fuzzer/FuzzerInternal.h
@@ -73,7 +73,8 @@
   // Merge Corpora[1:] into Corpora[0].
   void Merge(const Vector<std::string> &Corpora);
-  void CrashResistantMergeInternalStep(const std::string &ControlFilePath);
+  void CrashResistantMergeInternalStep(const std::string &ControlFilePath,
+                                       bool UseRelative = false);
   MutationDispatcher &GetMD() { return MD; }
   void PrintFinalStats();
   void SetMaxInputLen(size_t MaxInputLen);
Index: compiler-rt/lib/fuzzer/FuzzerMerge.h
===================================================================
--- compiler-rt/lib/fuzzer/FuzzerMerge.h
+++ compiler-rt/lib/fuzzer/FuzzerMerge.h
@@ -45,6 +45,7 @@
 #include <istream>
 #include <ostream>
 #include <set>
+#include <unordered_map>
 #include <vector>
 
 namespace fuzzer {
@@ -53,6 +54,7 @@
   std::string Name;
   size_t Size = 0;
   Vector<uint32_t> Features, Cov;
+  std::unordered_map<uint64_t, Vector<uint32_t>> FeaturesByHash, CovByHash;
 };
 
 struct Merger {
@@ -64,11 +66,18 @@
   bool Parse(std::istream &IS, bool ParseCoverage);
   bool Parse(const std::string &Str, bool ParseCoverage);
   void ParseOrExit(std::istream &IS, bool ParseCoverage);
+
+  void MergeValuesInto(const Vector<uint32_t> &Values,
+                       Vector<uint32_t> &AllValues);
+  void DedupValuesFrom(Vector<uint32_t> &Values,
+                       const Vector<uint32_t> &AllValues);
+  bool AddNewValues(const Vector<uint32_t> &Values,
+                    Vector<uint32_t> &AllValues, Vector<uint32_t> &NewValues);
+
   size_t Merge(const Set<uint32_t> &InitialFeatures, Set<uint32_t> *NewFeatures,
                const Set<uint32_t> &InitialCov, Set<uint32_t> *NewCov,
                Vector<std::string> *NewFiles);
   size_t ApproximateMemoryConsumption() const;
-  Set<uint32_t> AllFeatures() const;
 };
 
 void CrashResistantMerge(const Vector<std::string> &Args,
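The new `FeaturesByHash`/`CovByHash` maps key values by module hash, so a value stays meaningful even when the process that reads the control file loaded the module at a different feature/PC offset than the process that wrote it. To make the representation concrete, here is a minimal sketch of the relative-to-absolute translation the patch performs in `Merger::Merge` below; `TranslateFeature` is a hypothetical helper written only for illustration, while `ModuleInfo` and `FirstFeature` are the fields the patch itself relies on:

    // Illustration only, not part of the patch. An FT_REL value is an offset
    // from the module's first feature; adding FirstFeature recovers the
    // process-wide feature ID.
    static uint32_t TranslateFeature(const ModuleInfo &Info, uint32_t Relative) {
      return static_cast<uint32_t>(Info.FirstFeature + Relative);
    }
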
Index: compiler-rt/lib/fuzzer/FuzzerMerge.cpp
===================================================================
--- compiler-rt/lib/fuzzer/FuzzerMerge.cpp
+++ compiler-rt/lib/fuzzer/FuzzerMerge.cpp
@@ -14,7 +14,6 @@
 #include "FuzzerInternal.h"
 #include "FuzzerTracePC.h"
 #include "FuzzerUtil.h"
-
 #include <fstream>
 #include <iterator>
 #include <set>
@@ -35,6 +34,43 @@
   }
 }
 
+void Merger::MergeValuesInto(const Vector<uint32_t> &Values,
+                             Vector<uint32_t> &Into) {
+  Vector<uint32_t> Out;
+  Out.reserve(Values.size() + Into.size());
+  std::set_union(Values.begin(), Values.end(), Into.begin(), Into.end(),
+                 std::back_inserter(Out));
+  if (Into.size() == Out.size())
+    return;
+  Out.shrink_to_fit();
+  Into.swap(Out);
+}
+
+void Merger::DedupValuesFrom(Vector<uint32_t> &Values,
+                             const Vector<uint32_t> &From) {
+  Vector<uint32_t> Out;
+  Out.reserve(Values.size());
+  std::set_difference(Values.begin(), Values.end(), From.begin(), From.end(),
+                      std::back_inserter(Out));
+  if (Out.size() == Values.size())
+    return;
+  Out.shrink_to_fit();
+  Values.swap(Out);
+}
+
+bool Merger::AddNewValues(const Vector<uint32_t> &Values,
+                          Vector<uint32_t> &All, Vector<uint32_t> &New) {
+  Vector<uint32_t> Diff;
+  Diff.reserve(Values.size());
+  std::set_difference(Values.begin(), Values.end(), All.begin(), All.end(),
+                      std::back_inserter(Diff));
+  if (Diff.empty())
+    return false;
+  MergeValuesInto(Diff, All);
+  MergeValuesInto(Diff, New);
+  return true;
+}
+
 // The control file example:
 //
 // 3 # The number of inputs
@@ -49,7 +85,10 @@
 // STARTED 2 567
 // FT 2 8 9
 // COV 2 11 12
+// FT_REL 2 0xdead 3 5 8   # *_REL indicates module-relative values.
+// COV_REL 2 0xbeef 13 21 34
 bool Merger::Parse(std::istream &IS, bool ParseCoverage) {
+  Files.clear();
   LastFailure.clear();
   std::string Line;
@@ -69,16 +108,26 @@
   // Parse file names.
   Files.resize(NumFiles);
-  for (size_t i = 0; i < NumFiles; i++)
-    if (!std::getline(IS, Files[i].Name, '\n'))
+  for (auto &File : Files) {
+    if (!std::getline(IS, File.Name, '\n'))
       return false;
+    File.Features.clear();
+    File.Cov.clear();
+  }
 
   // Parse STARTED, FT, and COV lines.
   size_t ExpectedStartMarker = 0;
   const size_t kInvalidStartMarker = -1;
   size_t LastSeenStartMarker = kInvalidStartMarker;
-  Vector<uint32_t> TmpFeatures;
+
+  bool Succeeded = true;
+  uint64_t Hash = 0;
+  Vector<uint32_t> Tmp;
   Set<uint32_t> PCs;
+
+  Vector<uint32_t> AllFeatures, AllPCs;
+  std::unordered_map<uint64_t, Vector<uint32_t>> AllFeaturesByHash,
+      AllPCsByHash;
   while (std::getline(IS, Line, '\n')) {
     std::istringstream ISS1(Line);
     std::string Marker;
@@ -93,110 +142,182 @@
       LastSeenStartMarker = ExpectedStartMarker;
       assert(ExpectedStartMarker < Files.size());
       ExpectedStartMarker++;
-    } else if (Marker == "FT") {
-      // FT FILE_ID COV1 COV2 COV3 ...
-      size_t CurrentFileIdx = N;
-      if (CurrentFileIdx != LastSeenStartMarker)
-        return false;
-      LastSeenStartMarker = kInvalidStartMarker;
-      if (ParseCoverage) {
-        TmpFeatures.clear(); // use a vector from outer scope to avoid resizes.
-        while (ISS1 >> N)
-          TmpFeatures.push_back(N);
-        std::sort(TmpFeatures.begin(), TmpFeatures.end());
-        Files[CurrentFileIdx].Features = TmpFeatures;
-      }
-    } else if (Marker == "COV") {
-      size_t CurrentFileIdx = N;
-      if (ParseCoverage)
-        while (ISS1 >> N)
-          if (PCs.insert(N).second)
-            Files[CurrentFileIdx].Cov.push_back(N);
-    } else {
+      Succeeded = false;
+      continue;
+    }
+
+    // MARKER FILE_ID [HASH] VAL1 VAL2 VAL3 ...
+    size_t CurrentFileIdx = N;
+    if (CurrentFileIdx != LastSeenStartMarker)
       return false;
-    }
+    Succeeded = true;
+    if (!ParseCoverage)
+      continue;
+    if (Marker == "FT_REL" || Marker == "COV_REL")
+      if (!(ISS1 >> std::hex >> Hash >> std::dec))
+        return false;
+
+    // Use a vector from the outer scope to reduce resizes.
+    Tmp.clear();
+    while (ISS1 >> N)
+      Tmp.push_back(N);
+    std::sort(Tmp.begin(), Tmp.end());
+    auto &File = Files[CurrentFileIdx];
+
+    if (Marker == "FT")
+      MergeValuesInto(Tmp, File.Features);
+    else if (Marker == "COV")
+      AddNewValues(Tmp, AllPCs, File.Cov);
+    else if (Marker == "FT_REL")
+      MergeValuesInto(Tmp, File.FeaturesByHash[Hash]);
+    else if (Marker == "COV_REL")
+      AddNewValues(Tmp, AllPCsByHash[Hash], File.CovByHash[Hash]);
+    else
+      return false;
   }
-  if (LastSeenStartMarker != kInvalidStartMarker)
+  if (!Succeeded)
     LastFailure = Files[LastSeenStartMarker].Name;
 
   FirstNotProcessedFile = ExpectedStartMarker;
   return true;
 }
 
+static size_t GetNumFeatures(const MergeFileInfo &File) {
+  auto N = File.Features.size();
+  for (const auto &F : File.FeaturesByHash)
+    N += F.second.size();
+  return N;
+}
+
+static size_t GetNumPCs(const MergeFileInfo &File) {
+  auto N = File.Cov.size();
+  for (const auto &C : File.CovByHash)
+    N += C.second.size();
+  return N;
+}
+
 size_t Merger::ApproximateMemoryConsumption() const {
-  size_t Res = 0;
-  for (const auto &F: Files)
-    Res += sizeof(F) + F.Features.size() * sizeof(F.Features[0]);
-  return Res;
+  size_t NumFeatures = 0;
+  size_t NumPCs = 0;
+  size_t NumHashes = 0;
+  for (const auto &File : Files) {
+    NumFeatures += GetNumFeatures(File);
+    NumPCs += GetNumPCs(File);
+    NumHashes += File.FeaturesByHash.size() + File.CovByHash.size();
+  }
+  return (Files.size() * sizeof(Files[0])) + (NumFeatures * sizeof(uint32_t)) +
+         (NumPCs * sizeof(uint32_t)) + (NumHashes * sizeof(uint64_t));
 }
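For reference, the extended format parses the same way as the absolute one; a small sketch using the new markers (it mirrors the unit tests further down, and the hash 0xbeef is an arbitrary example value):

    // Sketch: FT carries absolute features; FT_REL carries a module hash
    // followed by features relative to that module's first feature.
    Merger M;
    bool Ok = M.Parse("2\n1\nA\nB\n"
                      "STARTED 0 1000\n"
                      "FT 0 1 2 3\n"
                      "STARTED 1 1001\n"
                      "FT_REL 1 0xbeef 4 5\n",
                      /*ParseCoverage=*/true);
    // On success, M.Files[1].FeaturesByHash[0xbeef] holds {4, 5}.
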
-// Decides which files need to be merged (add those to NewFiles).
-// Returns the number of new features added.
 size_t Merger::Merge(const Set<uint32_t> &InitialFeatures,
                      Set<uint32_t> *NewFeatures,
                      const Set<uint32_t> &InitialCov, Set<uint32_t> *NewCov,
                      Vector<std::string> *NewFiles) {
-  NewFiles->clear();
-  NewFeatures->clear();
-  NewCov->clear();
   assert(NumFilesInFirstCorpus <= Files.size());
-  Set<uint32_t> AllFeatures = InitialFeatures;
-  // What features are in the initial corpus?
+  Vector<uint32_t> AllFeatures, AddedFeatures, TmpFeatures;
+  std::unordered_map<uint64_t, Vector<uint32_t>> AllFeaturesByHash;
+  std::copy(InitialFeatures.begin(), InitialFeatures.end(),
+            std::back_inserter(AllFeatures));
+
+  Vector<uint32_t> AllCov, AddedCov;
+  std::copy(InitialCov.begin(), InitialCov.end(), std::back_inserter(AllCov));
+
+  Vector<std::string> AddedFiles;
+
+  // What features and coverage are in the initial corpus?
   for (size_t i = 0; i < NumFilesInFirstCorpus; i++) {
-    auto &Cur = Files[i].Features;
-    AllFeatures.insert(Cur.begin(), Cur.end());
+    auto &File = Files[i];
+    MergeValuesInto(File.Features, AllFeatures);
+    for (const auto &F : File.FeaturesByHash)
+      MergeValuesInto(F.second, AllFeaturesByHash[F.first]);
+    MergeValuesInto(File.Cov, AllCov);
   }
+
   // Remove all features that we already know from all other inputs.
   for (size_t i = NumFilesInFirstCorpus; i < Files.size(); i++) {
-    auto &Cur = Files[i].Features;
-    Vector<uint32_t> Tmp;
-    std::set_difference(Cur.begin(), Cur.end(), AllFeatures.begin(),
-                        AllFeatures.end(), std::inserter(Tmp, Tmp.begin()));
-    Cur.swap(Tmp);
+    auto &File = Files[i];
+    DedupValuesFrom(File.Features, AllFeatures);
+    for (auto &F : File.FeaturesByHash)
+      DedupValuesFrom(F.second, AllFeaturesByHash[F.first]);
   }
 
   // Sort. Give preference to
   //   * smaller files
   //   * files with more features.
   std::sort(Files.begin() + NumFilesInFirstCorpus, Files.end(),
-            [&](const MergeFileInfo &a, const MergeFileInfo &b) -> bool {
-              if (a.Size != b.Size)
-                return a.Size < b.Size;
-              return a.Features.size() > b.Features.size();
+            [&](const MergeFileInfo &A, const MergeFileInfo &B) -> bool {
+              if (A.Size != B.Size)
+                return A.Size < B.Size;
+              return GetNumFeatures(A) > GetNumFeatures(B);
             });
 
   // One greedy pass: add the file's features to AllFeatures.
   // If new features were added, add this file to NewFiles.
+  ModuleInfo Info;
+  size_t NumNewFeaturesByHash = 0;
   for (size_t i = NumFilesInFirstCorpus; i < Files.size(); i++) {
-    auto &Cur = Files[i].Features;
-    // Printf("%s -> sz %zd ft %zd\n", Files[i].Name.c_str(),
-    //        Files[i].Size, Cur.size());
-    bool FoundNewFeatures = false;
-    for (auto Fe: Cur) {
-      if (AllFeatures.insert(Fe).second) {
-        FoundNewFeatures = true;
-        NewFeatures->insert(Fe);
+    auto &File = Files[i];
+
+    bool FoundNewFeatures =
+        AddNewValues(File.Features, AllFeatures, AddedFeatures);
+    for (auto &F : File.FeaturesByHash) {
+      auto Hash = F.first;
+      auto &Features = F.second;
+      // Look up the corresponding module by hash. The module may or may not be
+      // present; e.g. it may only be loaded as a result of executing a
+      // specific input. If it is present, convert any new relative features to
+      // absolute. Otherwise, simply record the number of new features.
+      if (TPC.ModuleInfoByHash(Hash, &Info)) {
+        std::transform(Features.begin(), Features.end(), Features.begin(),
                       [&Info](uint32_t Feature) -> uint32_t {
                         return static_cast<uint32_t>(Info.FirstFeature +
                                                      Feature);
                       });
+        FoundNewFeatures |= AddNewValues(Features, AllFeatures, AddedFeatures);
+      } else {
+        FoundNewFeatures |=
+            AddNewValues(Features, AllFeaturesByHash[Hash], TmpFeatures);
+        NumNewFeaturesByHash += TmpFeatures.size();
+        TmpFeatures.clear();
       }
     }
+
+    AddNewValues(File.Cov, AllCov, AddedCov);
+    for (auto &C : File.CovByHash) {
+      auto Hash = C.first;
+      auto &Cov = C.second;
+      if (TPC.ModuleInfoByHash(Hash, &Info)) {
+        std::transform(Cov.begin(), Cov.end(), Cov.begin(),
                       [&Info](uint32_t Idx) -> uint32_t {
                         return static_cast<uint32_t>(Info.FirstIdx + Idx);
                       });
+        AddNewValues(Cov, AllCov, AddedCov);
+      }
+    }
+
     if (FoundNewFeatures)
-      NewFiles->push_back(Files[i].Name);
-    for (auto Cov : Files[i].Cov)
-      if (InitialCov.find(Cov) == InitialCov.end())
-        NewCov->insert(Cov);
+      AddedFiles.push_back(File.Name);
   }
-  return NewFeatures->size();
-}
 
-Set<uint32_t> Merger::AllFeatures() const {
-  Set<uint32_t> S;
-  for (auto &File : Files)
-    S.insert(File.Features.begin(), File.Features.end());
-  return S;
+  if (NewFeatures) {
+    NewFeatures->clear();
+    NewFeatures->insert(AddedFeatures.begin(), AddedFeatures.end());
+  }
+  if (NewCov) {
+    NewCov->clear();
+    NewCov->insert(AddedCov.begin(), AddedCov.end());
+  }
+  if (NewFiles)
+    NewFiles->swap(AddedFiles);
+
+  return AddedFeatures.size() + NumNewFeaturesByHash;
 }
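Taken together, file selection works as before, but the return value now also counts features that were only ever seen relative to unknown modules. A small usage sketch, with values mirroring the TEST(Merger, Merge) cases further down:

    // Sketch: one merge pass over a parsed control file.
    Merger M;
    Set<uint32_t> Features, NewFeatures, Cov, NewCov; // empty initial state
    Vector<std::string> NewFiles;
    M.Parse("3\n1\nA\nB\nC\n"
            "STARTED 0 1000\nFT 0 1 2 3\n"
            "STARTED 1 1001\nFT 1 4 5 6\n"
            "STARTED 2 1002\nFT 2 6 1 3\n",
            /*ParseCoverage=*/true);
    // A seeds the corpus with {1, 2, 3}; B adds {4, 5, 6}; C adds nothing new.
    size_t NumAdded = M.Merge(Features, &NewFeatures, Cov, &NewCov, &NewFiles);
    // NumAdded == 3 and NewFiles == {"B"}.
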
 // Inner process. May crash if the target crashes.
-void Fuzzer::CrashResistantMergeInternalStep(const std::string &CFPath) {
+void Fuzzer::CrashResistantMergeInternalStep(const std::string &CFPath,
+                                             bool UseRelative) {
   Printf("MERGE-INNER: using the control file '%s'\n", CFPath.c_str());
   Merger M;
   std::ifstream IF(CFPath);
@@ -211,12 +332,22 @@
          M.Files.size(), M.FirstNotProcessedFile,
          M.Files.size() - M.FirstNotProcessedFile);
 
-  std::ofstream OF(CFPath, std::ofstream::out | std::ofstream::app);
-  Set<size_t> AllFeatures;
-  auto PrintStatsWrapper = [this, &AllFeatures](const char* Where) {
-    this->PrintStats(Where, "\n", 0, AllFeatures.size());
-  };
+  // Use vectors in the outer scope to reduce reallocations.
+  Vector<uint32_t> Features, AllFeatures, UniqFeatures;
   Set<const TracePC::PCTableEntry *> AllPCs;
+  Vector<uint32_t> UniqCov;
+
+  auto PrintStatsWrapper = [&](const char *Where) {
+    PrintStats(Where, "\n", 0, AllFeatures.size());
+  };
+
+  std::ofstream OF(CFPath, std::ofstream::out | std::ofstream::app);
+  ModuleInfo Info;
+  bool First;
+
+  // |Merge| guarantees that this will iterate over files in this order:
+  //   * First, files in the initial corpus ordered by size, smallest first.
+  //   * Then, all other files, smallest first.
   for (size_t i = M.FirstNotProcessedFile; i < M.Files.size(); i++) {
     Fuzzer::MaybeExitGracefully();
     auto U = FileToVector(M.Files[i].Name);
@@ -228,36 +359,72 @@
     // Write the pre-run marker.
     OF << "STARTED " << i << " " << U.size() << "\n";
     OF.flush(); // Flush is important since Command::Execute may crash.
+
     // Run.
     TPC.ResetMaps();
     ExecuteCallback(U.data(), U.size());
-    // Collect coverage. We are iterating over the files in this order:
-    // * First, files in the initial corpus ordered by size, smallest first.
-    // * Then, all other files, smallest first.
-    // So it makes no sense to record all features for all files, instead we
-    // only record features that were not seen before.
-    Set<size_t> UniqFeatures;
+
+    // Only record features that were not seen before.
+    Features.clear();
     TPC.CollectFeatures([&](size_t Feature) {
-      if (AllFeatures.insert(Feature).second)
-        UniqFeatures.insert(Feature);
+      Features.push_back(static_cast<uint32_t>(Feature));
     });
+    UniqFeatures.clear();
+    M.AddNewValues(Features, AllFeatures, UniqFeatures);
+
     TPC.UpdateObservedPCs();
+    UniqCov.clear();
+    TPC.ForEachObservedPC([&](const TracePC::PCTableEntry *TE) {
+      if (AllPCs.insert(TE).second)
+        UniqCov.push_back(static_cast<uint32_t>(TPC.PCTableEntryIdx(TE)));
+    });
+    std::sort(UniqCov.begin(), UniqCov.end());
+
     // Show stats.
     if (!(TotalNumberOfRuns & (TotalNumberOfRuns - 1)))
       PrintStatsWrapper("pulse ");
     if (TotalNumberOfRuns == M.NumFilesInFirstCorpus)
      PrintStatsWrapper("LOADED");
-    // Write the post-run marker and the coverage.
-    OF << "FT " << i;
-    for (size_t F : UniqFeatures)
-      OF << " " << F;
-    OF << "\n";
-    OF << "COV " << i;
-    TPC.ForEachObservedPC([&](const TracePC::PCTableEntry *TE) {
-      if (AllPCs.insert(TE).second)
-        OF << " " << TPC.PCTableEntryIdx(TE);
-    });
-    OF << "\n";
+
+    // Write the post-run markers and the coverage.
+    if (UseRelative) {
+      First = true;
+      for (auto Feature : UniqFeatures) {
+        if (Feature < Info.FirstFeature || Info.LastFeature <= Feature) {
+          if (First)
+            First = false;
+          else
+            OF << "\n";
+          TPC.ModuleInfoByFeature(Feature, &Info);
+          OF << "FT_REL " << i << " " << std::hex << Info.Hash << std::dec;
+        }
+        OF << " " << (Feature - Info.FirstFeature);
+      }
+      OF << "\n";
+
+      First = true;
+      for (auto Idx : UniqCov) {
+        if (Idx < Info.FirstIdx || Info.LastIdx <= Idx) {
+          if (First)
+            First = false;
+          else
+            OF << "\n";
+          TPC.ModuleInfoByIdx(Idx, &Info);
+          OF << "COV_REL " << i << " " << std::hex << Info.Hash << std::dec;
+        }
+        OF << " " << (Idx - Info.FirstIdx);
+      }
+      OF << "\n";
+
+    } else {
+      OF << "FT " << i;
+      for (auto Feature : UniqFeatures)
+        OF << " " << Feature;
+      OF << "\n";
+      OF << "COV " << i;
+      for (auto Idx : UniqCov)
+        OF << " " << Idx;
+      OF << "\n";
+    }
     OF.flush();
   }
   PrintStatsWrapper("DONE  ");
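With UseRelative set, the post-run record becomes one line per (file, module) pair rather than a single absolute FT/COV line per file. For example, processing input 3 while touching two modules might append records like the following to the control file (the hashes and values here are made up for illustration):

    STARTED 3 1024
    FT_REL 3 9a3cf00dd00dfeed 2 9 17
    FT_REL 3 5b1efeedfacecafe 4 11
    COV_REL 3 9a3cf00dd00dfeed 5 6 21
    COV_REL 3 5b1efeedfacecafe 7
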
Index: compiler-rt/lib/fuzzer/FuzzerTracePC.cpp
===================================================================
--- compiler-rt/lib/fuzzer/FuzzerTracePC.cpp
+++ compiler-rt/lib/fuzzer/FuzzerTracePC.cpp
@@ -500,7 +500,7 @@
 size_t TracePC::StackDepthStepFunction(uintptr_t A) {
   if (!A)
     return A;
-  uint32_t Log2 = Log(A);
+  auto Log2 = Log(A);
   if (Log2 < 3)
     return A;
   Log2 -= 3;
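The reworded comment in the unit test below pins down the contract the merge helpers now depend on: `CollectFeatures` must report features in sorted order, which is what lets `Merger` run `std::set_union`/`std::set_difference` over plain vectors. A minimal sketch of that invariant (illustration only; it assumes the libFuzzer test context plus `<algorithm>` and `<cassert>`):

    Vector<uint32_t> Features;
    TPC.CollectFeatures([&](size_t Feature) {
      Features.push_back(static_cast<uint32_t>(Feature));
    });
    // Sorted input is the precondition for the vector-based set algorithms in
    // Merger::MergeValuesInto/DedupValuesFrom/AddNewValues.
    assert(std::is_sorted(Features.begin(), Features.end()));
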
Index: compiler-rt/lib/fuzzer/tests/FuzzerUnittest.cpp
===================================================================
--- compiler-rt/lib/fuzzer/tests/FuzzerUnittest.cpp
+++ compiler-rt/lib/fuzzer/tests/FuzzerUnittest.cpp
@@ -651,8 +651,7 @@
 template <size_t kNumPCs>
 void VerifyFeatures(FakeModule<kNumPCs> &M, bool UseCounters) {
   ModuleInfo Info;
-  // Each of the given features should appear in sorted order and map back to
-  // this module...
+  // Features must always be returned sorted.
   Vector<uint32_t> AllFeatures;
   uint32_t Last = 0;
   TPC.CollectFeatures([&](size_t Feature) {
@@ -742,23 +741,30 @@
   const char *kInvalidInputs[] = {
       // Bad file numbers
       "",
-      "x",
+      "BAD",
       "0\n0",
-      "3\nx",
+      "3\nBAD",
       "2\n3",
       "2\n2",
       // Bad file names
       "2\n2\nA\n",
       "2\n2\nA\nB\nC\n",
+      // Unexpected markers
+      "1\n1\nA\nFT 0",
+      "1\n1\nA\nCOV 0",
       // Unknown markers
-      "2\n1\nA\nSTARTED 0\nBAD 0 0x0",
+      "1\n1\nA\nBAD 0",
+      "2\n1\nA\nB\nSTARTED 0\nBAD 0",
       // Bad file IDs
       "1\n1\nA\nSTARTED 1",
       "2\n1\nA\nSTARTED 0\nFT 1 0x0",
+      // Missing Hash
+      "2\n1\nA\nB\nSTARTED 0\nFT_REL 0\n",
+      "2\n1\nA\nB\nSTARTED 0\nCOV_REL 0\n",
   };
   for (auto S : kInvalidInputs) {
     SCOPED_TRACE(S);
-    EXPECT_FALSE(M.Parse(S, false));
+    EXPECT_FALSE(M.Parse(S, true));
   }
 
   // Parse initial control file
@@ -783,7 +789,7 @@
                       "STARTED 0 1000\n"
                       "FT 0 1 2 3\n"
                       "STARTED 1 1001\n"
-                      "FT 1 4 5 6 \n"
+                      "FT_REL 1 0xbeef 4 5 6 \n"
                       "STARTED 2 1002\n"
                       "",
                       true));
@@ -798,7 +804,9 @@
   EXPECT_EQ(M.LastFailure, "C");
   EXPECT_EQ(M.FirstNotProcessedFile, 3U);
   TRACED_EQ(M.Files[0].Features, {1, 2, 3});
-  TRACED_EQ(M.Files[1].Features, {4, 5, 6});
+  TRACED_EQ(M.Files[0].FeaturesByHash[0xbeef], {});
+  TRACED_EQ(M.Files[1].Features, {});
+  TRACED_EQ(M.Files[1].FeaturesByHash[0xbeef], {4, 5, 6});
 
   // Parse control file without features or PCs
   EXPECT_TRUE(M.Parse("2\n0\nAA\nBB\n"
                       "STARTED 0 1000\n"
                       "FT 0\n"
                       "COV 0\n"
                       "STARTED 1 1001\n"
-                      "FT 1\n"
-                      "COV 1\n"
+                      "FT_REL 1 0xcafe\n"
+                      "COV_REL 1 0xcafe\n"
                       "",
                       true));
   ASSERT_EQ(M.Files.size(), 2U);
@@ -827,9 +835,11 @@
                       "STARTED 1 1001\n"
                       "FT 1 4 5 6\n"
                       "COV 1 7 8 9\n"
+                      "FT_REL 1 0xcafe 14 15 16\n"
+                      "COV_REL 1 0xcafe 17 18 19\n"
                       "STARTED 2 1002\n"
-                      "FT 2 6 1 3\n"
-                      "COV 2 16 11 13\n"
+                      "FT_REL 2 0xcafe 6 1 3\n"
+                      "COV_REL 2 0xcafe 16 11 13\n"
                       "",
                       true));
   ASSERT_EQ(M.Files.size(), 3U);
   TRACED_EQ(M.Files[0].Features, {1, 2, 3});
   TRACED_EQ(M.Files[0].Cov, {11, 12, 13});
   TRACED_EQ(M.Files[1].Features, {4, 5, 6});
+  TRACED_EQ(M.Files[1].FeaturesByHash[0xcafe], {14, 15, 16});
   TRACED_EQ(M.Files[1].Cov, {7, 8, 9});
-  TRACED_EQ(M.Files[2].Features, {1, 3, 6});
-  TRACED_EQ(M.Files[2].Cov, {16});
+  TRACED_EQ(M.Files[1].CovByHash[0xcafe], {17, 18, 19});
+  TRACED_EQ(M.Files[2].FeaturesByHash[0xcafe], {1, 3, 6});
+  TRACED_EQ(M.Files[2].CovByHash[0xcafe], {11, 13, 16});
 }
 
 TEST(Merger, Merge) {
@@ -893,10 +905,12 @@
   EXPECT_EQ(M.Merge(Features, &NewFeatures, Cov, &NewCov, &NewFiles), 0U);
   TRACED_EQ(M.Files, {"A", "B", "C"});
   TRACED_EQ(NewFiles, {});
-  TRACED_EQ(NewFeatures, {});
+  EXPECT_TRUE(NewFeatures.empty());
 
   // Can pass initial features and coverage.
-  Features = {1, 2, 3};
+  Features.insert(1);
+  Features.insert(2);
+  Features.insert(3);
   Cov = {};
   EXPECT_TRUE(M.Parse("2\n0\nA\nB\n"
                       "STARTED 0 1000\n"
@@ -959,6 +973,48 @@
   TRACED_EQ(M.Files, {"A", "B", "C", "D"});
   TRACED_EQ(NewFiles, {"D", "B"});
   TRACED_EQ(NewFeatures, {1, 2, 3});
+
+  // Relative features for unknown modules are counted and used for determining
+  // file order, but are not returned.
+  static FakeModule<512> Module;
+  ModuleInfo Info;
+  EXPECT_FALSE(TPC.ModuleInfoByHash(Module.Hash(), &Info));
+  std::ostringstream OSS;
+  OSS << "4\n1\nA\nB\nC\nD\n";
+  OSS << "STARTED 0 2000\n";
+  OSS << "FT 0 4 5 6 7 8\n";
+  OSS << "STARTED 1 1100\n";
+  OSS << "FT_REL 1 " << std::hex << Module.Hash() << std::dec << " 11 12 13 \n";
+  OSS << "STARTED 2 1100\n";
+  OSS << "FT_REL 2 " << std::hex << Module.Hash() << std::dec << " 12 13 \n";
+  OSS << "STARTED 3 1000\n";
+  OSS << "FT 3 1 \n";
+  auto MCF = OSS.str();
+  EXPECT_TRUE(M.Parse(MCF, true));
+  EXPECT_EQ(M.Merge(Features, &NewFeatures, Cov, &NewCov, &NewFiles), 4U);
+  TRACED_EQ(M.Files, {"A", "B", "C", "D"});
+  TRACED_EQ(NewFiles, {"D", "B"});
+  TRACED_EQ(NewFeatures, {1});
+
+  // Relative features for known modules are translated to absolute. The dummy
+  // module is used to ensure that the first feature is non-zero regardless of
+  // whether the tests are built with instrumentation or not.
+  static FakeModule<64> Dummy;
+  Dummy.AddToTPC();
+
+  Module.AddToTPC();
+  EXPECT_TRUE(TPC.ModuleInfoByHash(Module.Hash(), &Info));
+  EXPECT_NE(Info.FirstFeature, 0U);
+
+  EXPECT_TRUE(M.Parse(MCF, true));
+  EXPECT_EQ(M.Merge(Features, &NewFeatures, Cov, &NewCov, &NewFiles), 4U);
+  TRACED_EQ(M.Files, {"A", "B", "C", "D"});
+  TRACED_EQ(NewFiles, {"D", "B"});
+  Vector<uint32_t> Expected({1});
+  Expected.push_back(static_cast<uint32_t>(Info.FirstFeature + 11));
+  Expected.push_back(static_cast<uint32_t>(Info.FirstFeature + 12));
+  Expected.push_back(static_cast<uint32_t>(Info.FirstFeature + 13));
+  TRACED_EQ(NewFeatures, Expected);
 }
 
 #undef TRACED_EQ
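The RUN lines in the next test assemble a control file one echo at a time; with the relative markers, the first assembled %t/MCF reads as follows (the %t paths are lit placeholders, abbreviated here):

    3
    0
    <%t>/T1/1
    <%t>/T1/2
    <%t>/T1/3
    STARTED 0 1
    FT_REL 0 0xdeadbeef 11
    STARTED 1 2
    FT_REL 1 0xcafef00d 12
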
Index: compiler-rt/test/fuzzer/merge-control-file.test
===================================================================
--- compiler-rt/test/fuzzer/merge-control-file.test
+++ compiler-rt/test/fuzzer/merge-control-file.test
@@ -32,9 +32,9 @@
 RUN: rm -f %t/T1/*; cp %t/T0/* %t/T1
 RUN: echo 3 > %t/MCF; echo 0 >> %t/MCF; echo %t/T1/1 >> %t/MCF; echo %t/T1/2 >> %t/MCF; echo %t/T1/3 >> %t/MCF
 RUN: echo STARTED 0 1 >> %t/MCF
-RUN: echo FT 0 11 >> %t/MCF
+RUN: echo FT_REL 0 0xdeadbeef 11 >> %t/MCF
 RUN: echo STARTED 1 2 >> %t/MCF
-RUN: echo FT 1 12 >> %t/MCF
+RUN: echo FT_REL 1 0xcafef00d 12 >> %t/MCF
 RUN: %run %t/T.exe -merge=1 %t/T1 %t/T2 -merge_control_file=%t/MCF 2>&1 | FileCheck %s --check-prefix=OK_2
 OK_2: MERGE-OUTER: control file ok, 3 files total, first not processed file 2
 OK_2: MERGE-OUTER: 3 new files with {{.*}} new features added
@@ -42,10 +42,10 @@
 RUN: rm -f %t/T1/*; cp %t/T0/* %t/T1
 RUN: echo 3 > %t/MCF; echo 0 >> %t/MCF; echo %t/T1/1 >> %t/MCF; echo %t/T1/2 >> %t/MCF; echo %t/T1/3 >> %t/MCF
 RUN: echo STARTED 0 1 >> %t/MCF
-RUN: echo FT 0 11 >> %t/MCF
+RUN: echo FT_REL 0 0xdeadbeef 11 >> %t/MCF
 RUN: echo STARTED 1 2 >> %t/MCF
-RUN: echo FT 1 12 >> %t/MCF
+RUN: echo FT_REL 1 0xcafef00d 12 >> %t/MCF
 RUN: echo STARTED 2 2 >> %t/MCF
-RUN: echo FT 2 13 >> %t/MCF
+RUN: echo FT_REL 2 0x12345678 13 >> %t/MCF
 RUN: %run %t/T.exe -merge=1 %t/T1 %t/T2 -merge_control_file=%t/MCF 2>&1 | FileCheck %s --check-prefix=OK_3
 OK_3: MERGE-OUTER: nothing to do, merge has been completed before
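For context, the MERGE-OUTER lines checked in the next test come from the outer process, which keeps restarting the inner merge until it completes. Roughly, as a sketch only (this is not the actual CrashResistantMerge code; `Cmd` is assumed to be the prepared inner-process command):

    // Rough sketch of the loop behind the "in N attempt(s)" message.
    size_t Attempt = 0;
    while (true) {
      Attempt++;
      // Re-run the target with -merge_control_file=<CFPath>; the inner
      // process appends STARTED/FT/COV (or FT_REL/COV_REL) records as it goes.
      if (ExecuteCommand(Cmd) == 0) {
        Printf("MERGE-OUTER: successful in %zd attempt(s)\n", Attempt);
        break;
      }
      // A crash leaves a STARTED record with no FT/COV line, so the next
      // attempt resumes after the crashing input.
    }
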
Index: compiler-rt/test/fuzzer/merge.test
===================================================================
--- compiler-rt/test/fuzzer/merge.test
+++ compiler-rt/test/fuzzer/merge.test
@@ -52,7 +52,7 @@
 RUN: cp %t/T0/* %t/T1/
 RUN: echo 'FUZZER' > %t/T2/FUZZER
 RUN: %run %t-FullCoverageSetTest -merge=1 %t/T1 %t/T2 2>&1 | FileCheck %s --check-prefix=MERGE_WITH_CRASH
-MERGE_WITH_CRASH: MERGE-OUTER: succesfull in 2 attempt(s)
+MERGE_WITH_CRASH: MERGE-OUTER: successful in 2 attempt(s)
 MERGE_WITH_CRASH: MERGE-OUTER: 3 new files
 
 # Check that we actually limit the size with max_len
@@ -61,5 +61,5 @@
 RUN: rm -f %t/T1/*; cp %t/T0/* %t/T1
 RUN: %run %t-FullCoverageSetTest -merge=1 %t/T1 %t/T2 -max_len=5 2>&1 | FileCheck %s --check-prefix=MERGE_LEN5
 RUN: not grep FUZZER %t/T1/*
 RUN: grep FUZZE %t/T1/*
-MERGE_LEN5: MERGE-OUTER: succesfull in 1 attempt(s)
+MERGE_LEN5: MERGE-OUTER: successful in 1 attempt(s)