Index: llvm/lib/Bitcode/Reader/MetadataLoader.cpp
===================================================================
--- llvm/lib/Bitcode/Reader/MetadataLoader.cpp
+++ llvm/lib/Bitcode/Reader/MetadataLoader.cpp
@@ -768,13 +768,12 @@
     unsigned ID, PlaceholderQueue &Placeholders) {
   assert(ID < (MDStringRef.size()) + GlobalMetadataBitPosIndex.size());
   assert(ID >= MDStringRef.size() && "Unexpected lazy-loading of MDString");
-#ifndef NDEBUG
   // Lookup first if the metadata hasn't already been loaded.
   if (auto *MD = MetadataList.lookup(ID)) {
     auto *N = dyn_cast_or_null<MDNode>(MD);
-    assert(N && N->isTemporary() && "Lazy loading an already loaded metadata");
+    if (!N->isTemporary())
+      return;
   }
-#endif
   SmallVector<uint64_t, 64> Record;
   StringRef Blob;
   IndexCursor.JumpToBit(GlobalMetadataBitPosIndex[ID - MDStringRef.size()]);
@@ -827,8 +826,20 @@
   auto getMD = [&](unsigned ID) -> Metadata * {
     if (ID < MDStringRef.size())
       return lazyLoadOneMDString(ID);
-    if (!IsDistinct)
-      return MetadataList.getMetadataFwdRef(ID);
+    if (!IsDistinct) {
+      if (auto *MD = MetadataList.lookup(ID))
+        return MD;
+      // Create a temporary, it is needed before recursing in case there are
+      // uniquing cycles.
+      auto *TempMD = MetadataList.getMetadataFwdRef(ID);
+      // If lazy-loading is enabled, we try recursively to load the operand
+      // instead of creating a temporary.
+      if (ID < (MDStringRef.size() + GlobalMetadataBitPosIndex.size())) {
+        lazyLoadOneMetadata(ID, Placeholders);
+        return MetadataList.lookup(ID);
+      }
+      return TempMD;
+    }
     if (auto *MD = MetadataList.getMetadataIfResolved(ID))
       return MD;
     return &Placeholders.getPlaceholderOp(ID);
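
For context (not part of the patch): the new getMD path leans on temporary MDNodes as placeholders so that uniqued metadata participating in a cycle can still be constructed and resolved afterwards. Below is a minimal, hedged sketch of that general pattern using the public LLVM C++ API; the file name and build command are illustrative assumptions, not something taken from this patch or from MetadataLoader itself.

// uniquing_cycle_sketch.cpp (hypothetical name); build against LLVM, e.g.
//   clang++ uniquing_cycle_sketch.cpp $(llvm-config --cxxflags --ldflags --libs core)
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"

using namespace llvm;

int main() {
  LLVMContext Ctx;

  // Placeholder standing in for a node that has not been loaded yet.
  TempMDTuple Placeholder = MDTuple::getTemporary(Ctx, {});

  // A uniqued node that, for now, points at the placeholder.
  MDTuple *Outer = MDTuple::get(Ctx, {Placeholder.get()});

  // The node the placeholder stood for; it points back at Outer, so the two
  // uniqued nodes form a cycle once the placeholder is replaced.
  MDTuple *Inner = MDTuple::get(Ctx, {Outer});

  // Drop the placeholder in favor of the real node and force resolution of
  // the resulting cycle, as a reader would once all forward refs are known.
  Placeholder->replaceAllUsesWith(Inner);
  Outer->resolveCycles();

  return 0;
}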