The change in https://github.com/llvm/llvm-project/commit/1ae7d83803e45f6053ec6a606f259653846926b8 makes rustc unable to read the profiles that `llvm-profdata merge` outputs, which in turn causes downstream breakage (e.g. bug 1811960).

The diff below reverts the `InstrProfWriter.cpp` part of that commit, dropping the indexed header back from `Version9` to `Version8` and removing the binary IDs section:

```diff
diff --git a/llvm/lib/ProfileData/InstrProfWriter.cpp b/llvm/lib/ProfileData/InstrProfWriter.cpp
index af3c27ebac76..a6da1e0f3aec 100644
--- a/llvm/lib/ProfileData/InstrProfWriter.cpp
+++ b/llvm/lib/ProfileData/InstrProfWriter.cpp
@@ -291,10 +291,6 @@ void InstrProfWriter::mergeRecordsFromWriter(InstrProfWriter &&IPW,
     for (auto &Func : I.getValue())
       addRecord(I.getKey(), Func.first, std::move(Func.second), 1, Warn);
 
-  BinaryIds.reserve(BinaryIds.size() + IPW.BinaryIds.size());
-  for (auto &I : IPW.BinaryIds)
-    addBinaryIds(I);
-
   MemProfFrameData.reserve(IPW.MemProfFrameData.size());
   for (auto &I : IPW.MemProfFrameData) {
     // If we weren't able to add the frame mappings then it doesn't make sense
@@ -339,7 +335,6 @@ static void setSummary(IndexedInstrProf::Summary *TheSummary,
 
 Error InstrProfWriter::writeImpl(ProfOStream &OS) {
   using namespace IndexedInstrProf;
-  using namespace support;
 
   OnDiskChainedHashTableGenerator<InstrProfRecordWriterTrait> Generator;
 
@@ -356,7 +351,7 @@ Error InstrProfWriter::writeImpl(ProfOStream &OS) {
   // Write the header.
   IndexedInstrProf::Header Header;
   Header.Magic = IndexedInstrProf::Magic;
-  Header.Version = IndexedInstrProf::ProfVersion::Version9;
+  Header.Version = IndexedInstrProf::ProfVersion::Version8;
   if (static_cast<bool>(ProfileKind & InstrProfKind::IRInstrumentation))
     Header.Version |= VARIANT_MASK_IR_PROF;
   if (static_cast<bool>(ProfileKind & InstrProfKind::ContextSensitive))
@@ -396,12 +389,6 @@ Error InstrProfWriter::writeImpl(ProfOStream &OS) {
   // profile contains memory profile information.
   OS.write(0);
 
-  // Save the location of binary ids section.
-  uint64_t BinaryIdSectionOffset = OS.tell();
-  // Reserve space for the BinaryIdOffset field to be patched later if this
-  // profile contains binary ids.
-  OS.write(0);
-
   // Reserve space to write profile summary data.
   uint32_t NumEntries = ProfileSummaryBuilder::DefaultCutoffs.size();
   uint32_t SummarySize = Summary::getSize(Summary::NumKinds, NumEntries);
@@ -478,43 +465,6 @@ Error InstrProfWriter::writeImpl(ProfOStream &OS) {
     OS.patch(PatchItems, 3);
   }
 
-  // BinaryIdSection has two parts:
-  // 1. uint64_t BinaryIdsSectionSize
-  // 2. list of binary ids that consist of:
-  //    a. uint64_t BinaryIdLength
-  //    b. uint8_t BinaryIdData
-  //    c. uint8_t Padding (if necessary)
-  uint64_t BinaryIdSectionStart = OS.tell();
-  // Calculate size of binary section.
-  uint64_t BinaryIdsSectionSize = 0;
-
-  // Remove duplicate binary ids.
-  llvm::sort(BinaryIds);
-  BinaryIds.erase(std::unique(BinaryIds.begin(), BinaryIds.end()),
-                  BinaryIds.end());
-
-  for (auto BI : BinaryIds) {
-    // Increment by binary id length data type size.
-    BinaryIdsSectionSize += sizeof(uint64_t);
-    // Increment by binary id data length, aligned to 8 bytes.
-    BinaryIdsSectionSize += alignToPowerOf2(BI.size(), sizeof(uint64_t));
-  }
-  // Write binary ids section size.
-  OS.write(BinaryIdsSectionSize);
-
-  for (auto BI : BinaryIds) {
-    uint64_t BILen = BI.size();
-    // Write binary id length.
-    OS.write(BILen);
-    // Write binary id data.
-    for (unsigned K = 0; K < BILen; K++)
-      OS.writeByte(BI[K]);
-    // Write padding if necessary.
-    uint64_t PaddingSize = alignToPowerOf2(BILen, sizeof(uint64_t)) - BILen;
-    for (unsigned K = 0; K < PaddingSize; K++)
-      OS.writeByte(0);
-  }
-
   // Allocate space for data to be serialized out.
   std::unique_ptr<IndexedInstrProf::Summary> TheSummary =
       IndexedInstrProf::allocSummary(SummarySize);
@@ -537,11 +487,8 @@ Error InstrProfWriter::writeImpl(ProfOStream &OS) {
   PatchItem PatchItems[] = {
       // Patch the Header.HashOffset field.
       {HashTableStartFieldOffset, &HashTableStart, 1},
-      // Patch the Header.MemProfOffset (=0 for profiles without MemProf
-      // data).
+      // Patch the Header.MemProfOffset (=0 for profiles without MemProf data).
       {MemProfSectionOffset, &MemProfSectionStart, 1},
-      // Patch the Header.BinaryIdSectionOffset.
-      {BinaryIdSectionOffset, &BinaryIdSectionStart, 1},
       // Patch the summary data.
      {SummaryOffset, reinterpret_cast<uint64_t *>(TheSummary.get()),
       (int)(SummarySize / sizeof(uint64_t))},
```
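For context on why an older reader chokes: the commit bumps the indexed format to `Version9` and adds a binary-IDs section whose offset is patched into the header, and LLVM's indexed reader rejects any profile whose format version is newer than the one it was built with, so a rustc toolchain carrying a pre-Version9 LLVM fails on the merged output. Below is a minimal standalone sketch (not LLVM code) of that version check, assuming a little-endian host and the magic/variant-mask constants as I understand them from `InstrProfData.inc`; `SupportedVersion = 8` stands in for a reader that predates this commit:

```cpp
// Sketch: read the first two uint64_t fields of an indexed .profdata file
// (Magic and Version) and apply the same kind of "newer than I understand"
// check an older reader performs. Assumes a little-endian host, since the
// indexed format stores these fields little-endian.
#include <cstdint>
#include <cstdio>

namespace {
// 0xff 'l' 'p' 'r' 'o' 'f' 'i' 0x81, packed high byte first (assumed value).
constexpr uint64_t IndexedMagic = 0xff6c70726f666981ULL;
// The top byte of the Version word carries variant flags (IR, CS, memprof...).
constexpr uint64_t VariantMasksAll = 0xff00000000000000ULL;
constexpr uint64_t SupportedVersion = 8; // what a pre-Version9 reader accepts
} // namespace

int main(int argc, char **argv) {
  if (argc != 2) {
    std::fprintf(stderr, "usage: %s <file.profdata>\n", argv[0]);
    return 1;
  }
  std::FILE *F = std::fopen(argv[1], "rb");
  if (!F) {
    std::perror("fopen");
    return 1;
  }
  uint64_t MagicAndVersion[2] = {0, 0};
  size_t Read = std::fread(MagicAndVersion, sizeof(uint64_t), 2, F);
  std::fclose(F);
  if (Read != 2 || MagicAndVersion[0] != IndexedMagic) {
    std::fprintf(stderr, "not an indexed instrprof file\n");
    return 1;
  }
  // Strip the variant flag bits; what remains is the format version number.
  uint64_t Version = MagicAndVersion[1] & ~VariantMasksAll;
  std::printf("indexed profile format version: %llu\n",
              (unsigned long long)Version);
  if (Version > SupportedVersion)
    std::printf("a reader that only knows version %llu would reject this "
                "profile as an unsupported format version\n",
                (unsigned long long)SupportedVersion);
  return 0;
}
```

Run against the output of a Version9-era `llvm-profdata merge`, this should report version 9, which is the value a toolchain bundling an older LLVM trips over.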