Index: Simple.h =================================================================== --- Simple.h +++ Simple.h @@ -161,8 +161,8 @@ } -// ilist will lazily create a sentinal (so end() can return a node past the -// end of the list). We need this trait so that the sentinal is allocated +// ilist will lazily create a sentinel (so end() can return a node past the +// end of the list). We need this trait so that the sentinel is allocated // via the BumpPtrAllocator. namespace llvm { template<> ilist_sentinel_traits() : _allocator(nullptr) { } - void setAllocator(llvm::BumpPtrAllocator *alloc) { - _allocator = alloc; - } - lld::SimpleReference *createSentinel() const { + // The absence of an allocator is generally not an issue. That is the case for + // ilists of SimpleReferences created as temporary local objects. + // They don't need to be allocated via the BumpPtrAllocator. + if (!_allocator) { + return new lld::SimpleReference(); + } + return new (*_allocator) lld::SimpleReference(); } - static void destroySentinel(lld::SimpleReference*) {} + void destroySentinel(lld::SimpleReference *sentinel) { + // The same situation as described above in createSentinel(): + // sometimes there is no BumpPtrAllocator, and that's fine. 
+ if (!_allocator) { + delete sentinel; + } + } static lld::SimpleReference *provideInitialHead() { return nullptr; } @@ -197,6 +206,11 @@ ilist_traits::setPrev(newHead, sentinel); } +protected: + void setAllocator(llvm::BumpPtrAllocator *alloc) { + _allocator = alloc; + } + private: mutable llvm::BumpPtrAllocator *_allocator; }; @@ -204,11 +218,20 @@ namespace lld { +class SimpleReferencesList : public llvm::ilist { +public: + SimpleReferencesList(llvm::BumpPtrAllocator *alloc) { + // A SimpleReferencesList that is part of a SimpleDefinedAtom should + // always be created with a BumpPtrAllocator. + assert(alloc); + setAllocator(alloc); + } +}; + class SimpleDefinedAtom : public DefinedAtom { public: explicit SimpleDefinedAtom(const File &f) - : _file(f), _ordinal(f.getNextAtomOrdinalAndIncrement()) { - _references.setAllocator(&f.allocator()); + : _file(f), _ordinal(f.getNextAtomOrdinalAndIncrement()), _references(&f.allocator()) { } const File &file() const override { return _file; } @@ -267,33 +290,22 @@ /// Sort references in a canonical order (by offset, then by kind). void sortReferences() const { - // Cannot sort a linked list, so move elements into a temporary vector, - // sort the vector, then reconstruct the list. 
- llvm::SmallVector elements; - for (SimpleReference &node : _references) { - elements.push_back(&node); - } - std::sort(elements.begin(), elements.end(), - [] (const SimpleReference *lhs, const SimpleReference *rhs) -> bool { - uint64_t lhsOffset = lhs->offsetInAtom(); - uint64_t rhsOffset = rhs->offsetInAtom(); - if (rhsOffset != lhsOffset) - return (lhsOffset < rhsOffset); - if (rhs->kindNamespace() != lhs->kindNamespace()) - return (lhs->kindNamespace() < rhs->kindNamespace()); - if (rhs->kindArch() != lhs->kindArch()) - return (lhs->kindArch() < rhs->kindArch()); - return (lhs->kindValue() < rhs->kindValue()); - }); - _references.clearAndLeakNodesUnsafely(); - for (SimpleReference *node : elements) { - _references.push_back(node); - } + _references.sort([](const SimpleReference &lhs, const SimpleReference &rhs) -> bool { + uint64_t lhsOffset = lhs.offsetInAtom(); + uint64_t rhsOffset = rhs.offsetInAtom(); + if (rhsOffset != lhsOffset) + return (lhsOffset < rhsOffset); + if (rhs.kindNamespace() != lhs.kindNamespace()) + return (lhs.kindNamespace() < rhs.kindNamespace()); + if (rhs.kindArch() != lhs.kindArch()) + return (lhs.kindArch() < rhs.kindArch()); + return (lhs.kindValue() < rhs.kindValue()); + }); } void setOrdinal(uint64_t ord) { _ordinal = ord; } private: - typedef llvm::ilist RefList; + typedef SimpleReferencesList RefList; const File &_file; uint64_t _ordinal;