Index: ELF/Arch/X86_64.cpp =================================================================== --- ELF/Arch/X86_64.cpp +++ ELF/Arch/X86_64.cpp @@ -592,7 +592,7 @@ }; memcpy(Buf, Insn, sizeof(Insn)); - uint64_t Off = TargetInfo::getPltEntryOffset(Index); + uint64_t Off = getPltEntryOffset(Index); write32le(Buf + 3, GotPltEntryAddr - PltEntryAddr - 7); write32le(Buf + 8, -Off - 12 + 32); @@ -635,7 +635,7 @@ memcpy(Buf, Insn, sizeof(Insn)); write32le(Buf + 3, GotPltEntryAddr - PltEntryAddr - 7); - write32le(Buf + 8, -TargetInfo::getPltEntryOffset(Index) - 12); + write32le(Buf + 8, -getPltEntryOffset(Index) - 12); } template <class ELFT> static TargetInfo *getTargetInfo() { Index: ELF/Relocations.cpp =================================================================== --- ELF/Relocations.cpp +++ ELF/Relocations.cpp @@ -943,7 +943,7 @@ if (!Sym.isInPlt()) addPltEntry<ELFT>(In.Plt, In.GotPlt, In.RelaPlt, Target->PltRel, Sym); if (!Sym.isDefined()) - replaceWithDefined(Sym, In.Plt, Sym.getPltOffset(), 0); + replaceWithDefined(Sym, In.Plt, getPltEntryOffset(Sym.PltIndex), 0); Sym.NeedsPltAddr = true; Sec.Relocations.push_back({Expr, Type, Offset, Addend, &Sym}); return; Index: ELF/Symbols.h =================================================================== --- ELF/Symbols.h +++ ELF/Symbols.h @@ -172,7 +172,6 @@ uint64_t getGotPltOffset() const; uint64_t getGotPltVA() const; uint64_t getPltVA() const; - uint64_t getPltOffset() const; uint64_t getPPC64LongBranchTableVA() const; uint64_t getPPC64LongBranchOffset() const; uint64_t getSize() const; Index: ELF/Symbols.cpp =================================================================== --- ELF/Symbols.cpp +++ ELF/Symbols.cpp @@ -144,17 +144,8 @@ } uint64_t Symbol::getPltVA() const { - if (this->IsInIplt) { - if (Config->ZRetpolineplt) - return In.Iplt->getVA() + Target->getPltEntryOffset(PltIndex); - return In.Iplt->getVA() + PltIndex * Target->PltEntrySize; - } - return In.Plt->getVA() + Target->getPltEntryOffset(PltIndex); - } - - uint64_t 
Symbol::getPltOffset() const { - assert(!this->IsInIplt); - return Target->getPltEntryOffset(PltIndex); + PltSection *Plt = IsInIplt ? In.Iplt : In.Plt; + return Plt->getVA() + Plt->HeaderSize + PltIndex * Target->PltEntrySize; } uint64_t Symbol::getPPC64LongBranchTableVA() const { Index: ELF/SyntheticSections.h =================================================================== --- ELF/SyntheticSections.h +++ ELF/SyntheticSections.h @@ -659,13 +659,13 @@ size_t getSize() const override; bool empty() const override { return Entries.empty(); } void addSymbols(); - template <class ELFT> void addEntry(Symbol &Sym); + size_t HeaderSize; + private: unsigned getPltRelocOff() const; std::vector<std::pair<const Symbol *, unsigned>> Entries; - size_t HeaderSize; bool IsIplt; }; Index: ELF/Target.h =================================================================== --- ELF/Target.h +++ ELF/Target.h @@ -45,10 +45,6 @@ virtual void addPltHeaderSymbols(InputSection &IS) const {} virtual void addPltSymbols(InputSection &IS, uint64_t Off) const {} - unsigned getPltEntryOffset(unsigned Index) const { - return Index * PltEntrySize + PltHeaderSize; - } - // Returns true if a relocation only uses the low bits of a value such that // all those bits are in the same page. For example, if the relocation // only uses the low 12 bits in a system with 4k pages. If this is true, the @@ -201,6 +197,10 @@ ", " + Twine(Max).str() + "]" + Hint); } +inline unsigned getPltEntryOffset(unsigned Idx) { + return Target->PltHeaderSize + Target->PltEntrySize * Idx; +} + // Make sure that V can be represented as an N bit signed integer. inline void checkInt(uint8_t *Loc, int64_t V, int N, RelType Type) { if (V != llvm::SignExtend64(V, N))