29#define DEBUG_TYPE "dyld"
34 or32le(L, (Imm & 0xFFF) << 10);
// Endianness-dispatching store helper: forwards to the endian-templated
// write<T, llvm::endianness::{big,little}> overload based on the runtime
// `isBE` flag, writing value V through pointer P.
// NOTE(review): this view of the source is elided/garbled (embedded original
// line numbers, missing closing brace) — confirm against the upstream file.
37template <
class T>
static void write(
bool isBE,
void *
P,
T V) {
38 isBE ? write<T, llvm::endianness::big>(
P, V)
39 : write<T, llvm::endianness::little>(
P, V);
44 uint32_t ImmHi = (Imm & 0x1FFFFC) << 3;
45 uint64_t Mask = (0x3 << 29) | (0x1FFFFC << 3);
53 return (Val >> Start) & Mask;
// ELF object wrapper used by the runtime dynamic loader; derives from
// ELFObjectFile<ELFT> so sections/symbols can be patched in place after load.
// NOTE(review): class body is largely elided in this view — members below the
// typedef are not visible here.
58template <
class ELFT>
class DyldELFObject :
public ELFObjectFile<ELFT> {
// addr_type is the ELF-class-sized unsigned integer (32- or 64-bit).
61 typedef typename ELFT::uint addr_type;
79 return v->isDyldType();
91 this->isDyldELFObject =
true;
98 if (
auto E = Obj.takeError())
100 std::unique_ptr<DyldELFObject<ELFT>>
Ret(
101 new DyldELFObject<ELFT>(std::move(*Obj)));
102 return std::move(Ret);
106void DyldELFObject<ELFT>::updateSectionAddress(
const SectionRef &Sec,
110 const_cast<Elf_Shdr *
>(
reinterpret_cast<const Elf_Shdr *
>(ShdrRef.
p));
114 shdr->sh_addr =
static_cast<addr_type
>(
Addr);
118void DyldELFObject<ELFT>::updateSymbolAddress(
const SymbolRef &SymRef,
121 Elf_Sym *sym =
const_cast<Elf_Sym *
>(
126 sym->st_value =
static_cast<addr_type
>(
Addr);
// Per-object load info handed back to clients; overrides getObjectForDebug to
// produce a debugger-consumable copy of the object with relocated addresses.
// NOTE(review): base-class specification and several members are elided in
// this view of the source.
129class LoadedELFObjectInfo final
131 RuntimeDyld::LoadedObjectInfo> {
133 LoadedELFObjectInfo(
RuntimeDyldImpl &RTDyld, ObjSectionToIDMap ObjSecToIDMap)
137 getObjectForDebug(
const ObjectFile &Obj)
const override;
140template <
typename ELFT>
143 const LoadedELFObjectInfo &L) {
144 typedef typename ELFT::Shdr Elf_Shdr;
145 typedef typename ELFT::uint addr_type;
148 DyldELFObject<ELFT>::create(Buffer);
152 std::unique_ptr<DyldELFObject<ELFT>> Obj = std::move(*ObjOrErr);
156 for (
const auto &Sec : Obj->sections()) {
163 if (*NameOrErr !=
"") {
165 Elf_Shdr *shdr =
const_cast<Elf_Shdr *
>(
166 reinterpret_cast<const Elf_Shdr *
>(ShdrRef.
p));
168 if (
uint64_t SecLoadAddr =
L.getSectionLoadAddress(*SI)) {
171 shdr->sh_addr =
static_cast<addr_type
>(SecLoadAddr);
177 return std::move(Obj);
181createELFDebugObject(
const ObjectFile &Obj,
const LoadedELFObjectInfo &L) {
184 std::unique_ptr<MemoryBuffer> Buffer =
191 createRTDyldELFObject<ELF32LE>(Buffer->getMemBufferRef(), Obj, L);
194 createRTDyldELFObject<ELF32BE>(Buffer->getMemBufferRef(), Obj, L);
197 createRTDyldELFObject<ELF64BE>(Buffer->getMemBufferRef(), Obj, L);
200 createRTDyldELFObject<ELF64LE>(Buffer->getMemBufferRef(), Obj, L);
// Returns a debug-ready copy of Obj by delegating to createELFDebugObject,
// passing *this so section load addresses can be applied to the copy.
// NOTE(review): closing brace elided in this view of the source.
209LoadedELFObjectInfo::getObjectForDebug(
const ObjectFile &Obj)
const {
210 return createELFDebugObject(Obj, *
this);
223 for (
int i = 0, e = UnregisteredEHFrameSections.
size(); i != e; ++i) {
224 SID EHFrameSID = UnregisteredEHFrameSections[i];
225 uint8_t *EHFrameAddr =
Sections[EHFrameSID].getAddress();
227 size_t EHFrameSize =
Sections[EHFrameSID].getSize();
230 UnregisteredEHFrameSections.
clear();
233std::unique_ptr<RuntimeDyldELF>
248std::unique_ptr<RuntimeDyld::LoadedObjectInfo>
251 return std::make_unique<LoadedELFObjectInfo>(*
this, *ObjSectionToIDOrErr);
260void RuntimeDyldELF::resolveX86_64Relocation(
const SectionEntry &Section,
268 case ELF::R_X86_64_NONE:
270 case ELF::R_X86_64_8: {
273 uint8_t TruncatedAddr = (
Value & 0xFF);
274 *Section.getAddressWithOffset(
Offset) = TruncatedAddr;
276 <<
format(
"%p\n", Section.getAddressWithOffset(
Offset)));
279 case ELF::R_X86_64_16: {
286 <<
format(
"%p\n", Section.getAddressWithOffset(
Offset)));
289 case ELF::R_X86_64_64: {
296 case ELF::R_X86_64_32:
297 case ELF::R_X86_64_32S: {
300 (
Type == ELF::R_X86_64_32S &&
301 ((int64_t)
Value <= INT32_MAX && (int64_t)
Value >= INT32_MIN)));
309 case ELF::R_X86_64_PC8: {
311 int64_t RealOffset =
Value + Addend - FinalAddress;
312 assert(isInt<8>(RealOffset));
313 int8_t TruncOffset = (RealOffset & 0xFF);
317 case ELF::R_X86_64_PC32: {
319 int64_t RealOffset =
Value + Addend - FinalAddress;
320 assert(isInt<32>(RealOffset));
321 int32_t TruncOffset = (RealOffset & 0xFFFFFFFF);
326 case ELF::R_X86_64_PC64: {
328 int64_t RealOffset =
Value + Addend - FinalAddress;
332 <<
format(
"%p\n", FinalAddress));
335 case ELF::R_X86_64_GOTOFF64: {
338 for (
const auto &Section :
Sections) {
339 if (
Section.getName() ==
".got") {
340 GOTBase =
Section.getLoadAddressWithOffset(0);
344 assert(GOTBase != 0 &&
"missing GOT");
345 int64_t GOTOffset =
Value - GOTBase + Addend;
349 case ELF::R_X86_64_DTPMOD64: {
354 case ELF::R_X86_64_DTPOFF64:
355 case ELF::R_X86_64_TPOFF64: {
364 case ELF::R_X86_64_DTPOFF32:
365 case ELF::R_X86_64_TPOFF32: {
368 int64_t RealValue =
Value + Addend;
369 assert(RealValue >= INT32_MIN && RealValue <= INT32_MAX);
370 int32_t TruncValue = RealValue;
378void RuntimeDyldELF::resolveX86Relocation(
const SectionEntry &Section,
382 case ELF::R_386_32: {
389 case ELF::R_386_PLT32:
390 case ELF::R_386_PC32: {
406void RuntimeDyldELF::resolveAArch64Relocation(
const SectionEntry &Section,
417 <<
" FinalAddress: 0x" <<
format(
"%llx", FinalAddress)
418 <<
" Value: 0x" <<
format(
"%llx",
Value) <<
" Type: 0x"
420 <<
format(
"%llx", Addend) <<
"\n");
426 case ELF::R_AARCH64_NONE:
428 case ELF::R_AARCH64_ABS16: {
431 (Result >> 16) == 0);
432 write(isBE, TargetPtr,
static_cast<uint16_t>(Result & 0xffffU));
435 case ELF::R_AARCH64_ABS32: {
438 (Result >> 32) == 0);
439 write(isBE, TargetPtr,
static_cast<uint32_t>(Result & 0xffffffffU));
442 case ELF::R_AARCH64_ABS64:
445 case ELF::R_AARCH64_PLT32: {
447 assert(
static_cast<int64_t
>(Result) >= INT32_MIN &&
448 static_cast<int64_t
>(Result) <= INT32_MAX);
452 case ELF::R_AARCH64_PREL16: {
454 assert(
static_cast<int64_t
>(Result) >= INT16_MIN &&
455 static_cast<int64_t
>(Result) <= UINT16_MAX);
456 write(isBE, TargetPtr,
static_cast<uint16_t>(Result & 0xffffU));
459 case ELF::R_AARCH64_PREL32: {
461 assert(
static_cast<int64_t
>(Result) >= INT32_MIN &&
462 static_cast<int64_t
>(Result) <= UINT32_MAX);
463 write(isBE, TargetPtr,
static_cast<uint32_t>(Result & 0xffffffffU));
466 case ELF::R_AARCH64_PREL64:
467 write(isBE, TargetPtr,
Value + Addend - FinalAddress);
469 case ELF::R_AARCH64_CONDBR19: {
472 assert(isInt<21>(BranchImm));
473 *TargetPtr &= 0xff00001fU;
475 or32le(TargetPtr, (BranchImm & 0x001FFFFC) << 3);
478 case ELF::R_AARCH64_TSTBR14: {
481 assert(isInt<16>(BranchImm));
487 or32le(TargetPtr, (BranchImm & 0x0000FFFC) << 3);
490 case ELF::R_AARCH64_CALL26:
491 case ELF::R_AARCH64_JUMP26: {
497 assert(isInt<28>(BranchImm));
498 or32le(TargetPtr, (BranchImm & 0x0FFFFFFC) >> 2);
501 case ELF::R_AARCH64_MOVW_UABS_G3:
502 or32le(TargetPtr, ((
Value + Addend) & 0xFFFF000000000000) >> 43);
504 case ELF::R_AARCH64_MOVW_UABS_G2_NC:
505 or32le(TargetPtr, ((
Value + Addend) & 0xFFFF00000000) >> 27);
507 case ELF::R_AARCH64_MOVW_UABS_G1_NC:
508 or32le(TargetPtr, ((
Value + Addend) & 0xFFFF0000) >> 11);
510 case ELF::R_AARCH64_MOVW_UABS_G0_NC:
511 or32le(TargetPtr, ((
Value + Addend) & 0xFFFF) << 5);
513 case ELF::R_AARCH64_ADR_PREL_PG_HI21: {
516 ((
Value + Addend) & ~0xfffULL) - (FinalAddress & ~0xfffULL);
519 assert(isInt<33>(Result) &&
"overflow check failed for relocation");
526 case ELF::R_AARCH64_ADD_ABS_LO12_NC:
532 case ELF::R_AARCH64_LDST8_ABS_LO12_NC:
538 case ELF::R_AARCH64_LDST16_ABS_LO12_NC:
544 case ELF::R_AARCH64_LDST32_ABS_LO12_NC:
550 case ELF::R_AARCH64_LDST64_ABS_LO12_NC:
556 case ELF::R_AARCH64_LDST128_ABS_LO12_NC:
562 case ELF::R_AARCH64_LD_PREL_LO19: {
567 assert(isInt<21>(Result));
569 *TargetPtr &= 0xff00001fU;
572 *TargetPtr |= ((
Result & 0xffc) << (5 - 2));
575 case ELF::R_AARCH64_ADR_PREL_LO21: {
580 assert(isInt<21>(Result));
582 *TargetPtr &= 0x9f00001fU;
585 *TargetPtr |= ((
Result & 0xffc) << (5 - 2));
586 *TargetPtr |= (
Result & 0x3) << 29;
592void RuntimeDyldELF::resolveARMRelocation(
const SectionEntry &Section,
603 <<
" FinalAddress: " <<
format(
"%p", FinalAddress)
606 <<
" Addend: " <<
format(
"%x", Addend) <<
"\n");
612 case ELF::R_ARM_NONE:
615 case ELF::R_ARM_PREL31:
618 ((
Value - FinalAddress) & ~0x80000000);
620 case ELF::R_ARM_TARGET1:
621 case ELF::R_ARM_ABS32:
626 case ELF::R_ARM_MOVW_ABS_NC:
627 case ELF::R_ARM_MOVT_ABS:
628 if (
Type == ELF::R_ARM_MOVW_ABS_NC)
630 else if (
Type == ELF::R_ARM_MOVT_ABS)
634 (((
Value >> 12) & 0xF) << 16);
637 case ELF::R_ARM_PC24:
638 case ELF::R_ARM_CALL:
639 case ELF::R_ARM_JUMP24:
640 int32_t RelValue =
static_cast<int32_t
>(
Value - FinalAddress - 8);
641 RelValue = (RelValue & 0x03FFFFFC) >> 2;
649void RuntimeDyldELF::setMipsABI(
const ObjectFile &Obj) {
657 if (
auto *E = dyn_cast<ELFObjectFileBase>(&Obj)) {
658 unsigned AbiVariant = E->getPlatformFlags();
667 ObjSectionToIDMap &LocalSections,
679 for (
auto &Section : Obj.
sections()) {
689 if (
auto SectionIDOrErr =
693 return SectionIDOrErr.takeError();
708 ObjSectionToIDMap &LocalSections,
728 if (RelSectionName !=
".opd")
732 e = si->relocation_end();
737 if (TypeFunc != ELF::R_PPC64_ADDR64) {
742 uint64_t TargetSymbolOffset = i->getOffset();
745 if (
auto AddendOrErr = i->getAddend())
746 Addend = *AddendOrErr;
748 return AddendOrErr.takeError();
756 if (TypeTOC != ELF::R_PPC64_TOC)
762 if (Rel.
Addend != (int64_t)TargetSymbolOffset)
766 if (
auto TSIOrErr = TargetSymbol->
getSection())
769 return TSIOrErr.takeError();
772 bool IsCode = TSI->
isText();
777 return SectionIDOrErr.takeError();
778 Rel.
Addend = (intptr_t)Addend;
793 return (
value >> 16) & 0xffff;
797 return ((
value + 0x8000) >> 16) & 0xffff;
801 return (
value >> 32) & 0xffff;
805 return ((
value + 0x8000) >> 32) & 0xffff;
809 return (
value >> 48) & 0xffff;
813 return ((
value + 0x8000) >> 48) & 0xffff;
816void RuntimeDyldELF::resolvePPC32Relocation(
const SectionEntry &Section,
819 uint8_t *LocalAddress = Section.getAddressWithOffset(
Offset);
824 case ELF::R_PPC_ADDR16_LO:
827 case ELF::R_PPC_ADDR16_HI:
830 case ELF::R_PPC_ADDR16_HA:
836void RuntimeDyldELF::resolvePPC64Relocation(
const SectionEntry &Section,
839 uint8_t *LocalAddress =
Section.getAddressWithOffset(
Offset);
844 case ELF::R_PPC64_ADDR16:
847 case ELF::R_PPC64_ADDR16_DS:
850 case ELF::R_PPC64_ADDR16_LO:
853 case ELF::R_PPC64_ADDR16_LO_DS:
856 case ELF::R_PPC64_ADDR16_HI:
857 case ELF::R_PPC64_ADDR16_HIGH:
860 case ELF::R_PPC64_ADDR16_HA:
861 case ELF::R_PPC64_ADDR16_HIGHA:
864 case ELF::R_PPC64_ADDR16_HIGHER:
867 case ELF::R_PPC64_ADDR16_HIGHERA:
870 case ELF::R_PPC64_ADDR16_HIGHEST:
873 case ELF::R_PPC64_ADDR16_HIGHESTA:
876 case ELF::R_PPC64_ADDR14: {
879 uint8_t aalk = *(LocalAddress + 3);
882 case ELF::R_PPC64_REL16_LO: {
887 case ELF::R_PPC64_REL16_HI: {
892 case ELF::R_PPC64_REL16_HA: {
897 case ELF::R_PPC64_ADDR32: {
898 int64_t
Result =
static_cast<int64_t
>(
Value + Addend);
899 if (SignExtend64<32>(Result) !=
Result)
903 case ELF::R_PPC64_REL24: {
905 int64_t delta =
static_cast<int64_t
>(
Value - FinalAddress + Addend);
906 if (SignExtend64<26>(delta) != delta)
910 writeInt32BE(LocalAddress, (Inst & 0xFC000003) | (delta & 0x03FFFFFC));
912 case ELF::R_PPC64_REL32: {
914 int64_t delta =
static_cast<int64_t
>(
Value - FinalAddress + Addend);
915 if (SignExtend64<32>(delta) != delta)
919 case ELF::R_PPC64_REL64: {
924 case ELF::R_PPC64_ADDR64:
930void RuntimeDyldELF::resolveSystemZRelocation(
const SectionEntry &Section,
933 uint8_t *LocalAddress =
Section.getAddressWithOffset(
Offset);
938 case ELF::R_390_PC16DBL:
939 case ELF::R_390_PLT16DBL: {
941 assert(int16_t(Delta / 2) * 2 == Delta &&
"R_390_PC16DBL overflow");
945 case ELF::R_390_PC32DBL:
946 case ELF::R_390_PLT32DBL: {
948 assert(int32_t(Delta / 2) * 2 == Delta &&
"R_390_PC32DBL overflow");
952 case ELF::R_390_PC16: {
954 assert(int16_t(Delta) == Delta &&
"R_390_PC16 overflow");
958 case ELF::R_390_PC32: {
960 assert(int32_t(Delta) == Delta &&
"R_390_PC32 overflow");
964 case ELF::R_390_PC64: {
970 *LocalAddress = (uint8_t)(
Value + Addend);
984void RuntimeDyldELF::resolveBPFRelocation(
const SectionEntry &Section,
993 case ELF::R_BPF_NONE:
994 case ELF::R_BPF_64_64:
995 case ELF::R_BPF_64_32:
996 case ELF::R_BPF_64_NODYLD32:
998 case ELF::R_BPF_64_ABS64: {
1004 case ELF::R_BPF_64_ABS32: {
1042void RuntimeDyldELF::resolveRelocation(
const SectionEntry &Section,
1045 uint64_t SymOffset, SID SectionID) {
1048 resolveX86_64Relocation(Section,
Offset,
Value,
Type, Addend, SymOffset);
1085void *RuntimeDyldELF::computePlaceholderAddress(
unsigned SectionID,
uint64_t Offset)
const {
1091 if (
Value.SymbolName)
1098 bool IsLocal)
const {
1100 case ELF::R_MICROMIPS_GOT16:
1102 return ELF::R_MICROMIPS_LO16;
1104 case ELF::R_MICROMIPS_HI16:
1105 return ELF::R_MICROMIPS_LO16;
1106 case ELF::R_MIPS_GOT16:
1108 return ELF::R_MIPS_LO16;
1110 case ELF::R_MIPS_HI16:
1111 return ELF::R_MIPS_LO16;
1112 case ELF::R_MIPS_PCHI16:
1113 return ELF::R_MIPS_PCLO16;
1117 return ELF::R_MIPS_NONE;
1129bool RuntimeDyldELF::resolveAArch64ShortBranch(
1133 unsigned TargetSectionID;
1134 if (
Value.SymbolName) {
1141 const auto &
SymInfo = Loc->second;
1143 TargetSectionID =
SymInfo.getSectionID();
1144 TargetOffset =
SymInfo.getOffset();
1146 TargetSectionID =
Value.SectionID;
1152 if (TargetSectionID != SectionID)
1155 uint64_t SourceOffset = RelI->getOffset();
1160 if (!isInt<28>(TargetOffset +
Value.Addend - SourceOffset))
1164 if (
Value.SymbolName)
1172void RuntimeDyldELF::resolveAArch64Branch(
unsigned SectionID,
1177 LLVM_DEBUG(
dbgs() <<
"\t\tThis is an AArch64 branch relocation.");
1181 unsigned RelType = RelI->getType();
1183 StubMap::const_iterator i = Stubs.find(
Value);
1184 if (i != Stubs.end()) {
1185 resolveRelocation(Section,
Offset,
1186 Section.getLoadAddressWithOffset(i->second), RelType, 0);
1188 }
else if (!resolveAArch64ShortBranch(SectionID, RelI,
Value)) {
1196 ELF::R_AARCH64_MOVW_UABS_G3,
Value.Addend);
1198 StubTargetAddr -
Section.getAddress() + 4,
1199 ELF::R_AARCH64_MOVW_UABS_G2_NC,
Value.Addend);
1201 StubTargetAddr -
Section.getAddress() + 8,
1202 ELF::R_AARCH64_MOVW_UABS_G1_NC,
Value.Addend);
1204 StubTargetAddr -
Section.getAddress() + 12,
1205 ELF::R_AARCH64_MOVW_UABS_G0_NC,
Value.Addend);
1207 if (
Value.SymbolName) {
1218 resolveRelocation(Section,
Offset,
1221 Section.advanceStubOffset(getMaxStubSize());
1229 const auto &Obj = cast<ELFObjectFileBase>(O);
1230 uint64_t RelType = RelI->getType();
1233 Addend = *AddendOrErr;
1241 if (
auto TargetNameOrErr = Symbol->getName())
1242 TargetName = *TargetNameOrErr;
1244 return TargetNameOrErr.takeError();
1246 LLVM_DEBUG(
dbgs() <<
"\t\tRelType: " << RelType <<
" Addend: " << Addend
1247 <<
" TargetName: " << TargetName <<
"\n");
1257 if (!SymTypeOrErr) {
1263 SymType = *SymTypeOrErr;
1266 const auto &
SymInfo = gsi->second;
1276 auto SectionOrErr = Symbol->getSection();
1277 if (!SectionOrErr) {
1287 bool isCode = si->
isText();
1290 Value.SectionID = *SectionIDOrErr;
1292 return SectionIDOrErr.takeError();
1293 Value.Addend = Addend;
1301 Value.Addend = Addend;
1307 if (!
Value.SymbolName)
1308 Value.SymbolName =
"";
1322 if ((RelType == ELF::R_AARCH64_CALL26 ||
1323 RelType == ELF::R_AARCH64_JUMP26) &&
1325 resolveAArch64Branch(SectionID,
Value, RelI, Stubs);
1326 }
else if (RelType == ELF::R_AARCH64_ADR_GOT_PAGE) {
1329 uint64_t GOTOffset = findOrAllocGOTEntry(
Value, ELF::R_AARCH64_ABS64);
1330 resolveGOTOffsetRelocation(SectionID,
Offset, GOTOffset + Addend,
1331 ELF::R_AARCH64_ADR_PREL_PG_HI21);
1333 }
else if (RelType == ELF::R_AARCH64_LD64_GOT_LO12_NC) {
1334 uint64_t GOTOffset = findOrAllocGOTEntry(
Value, ELF::R_AARCH64_ABS64);
1335 resolveGOTOffsetRelocation(SectionID,
Offset, GOTOffset + Addend,
1336 ELF::R_AARCH64_LDST64_ABS_LO12_NC);
1338 processSimpleRelocation(SectionID,
Offset, RelType,
Value);
1341 if (RelType == ELF::R_ARM_PC24 || RelType == ELF::R_ARM_CALL ||
1342 RelType == ELF::R_ARM_JUMP24) {
1348 StubMap::const_iterator i = Stubs.find(
Value);
1349 if (i != Stubs.end()) {
1350 resolveRelocation(Section,
Offset,
1351 Section.getLoadAddressWithOffset(i->second), RelType,
1357 Stubs[
Value] = Section.getStubOffset();
1359 Section.getAddressWithOffset(Section.getStubOffset()));
1361 ELF::R_ARM_ABS32,
Value.Addend);
1362 if (
Value.SymbolName)
1369 Section.getLoadAddressWithOffset(Section.getStubOffset()), RelType,
1371 Section.advanceStubOffset(getMaxStubSize());
1375 reinterpret_cast<uint32_t*
>(computePlaceholderAddress(SectionID,
Offset));
1376 if (RelType == ELF::R_ARM_PREL31 || RelType == ELF::R_ARM_TARGET1 ||
1377 RelType == ELF::R_ARM_ABS32) {
1378 Value.Addend += *Placeholder;
1379 }
else if (RelType == ELF::R_ARM_MOVW_ABS_NC || RelType == ELF::R_ARM_MOVT_ABS) {
1381 Value.Addend += (int16_t)((*Placeholder & 0xFFF) | (((*Placeholder >> 16) & 0xF) << 12));
1383 processSimpleRelocation(SectionID,
Offset, RelType,
Value);
1386 uint8_t *Placeholder =
reinterpret_cast<uint8_t *
>(
1387 computePlaceholderAddress(SectionID,
Offset));
1389 if (RelType == ELF::R_MIPS_26) {
1397 uint32_t Addend = (Opcode & 0x03ffffff) << 2;
1399 Value.Addend += Addend;
1402 StubMap::const_iterator i = Stubs.find(
Value);
1403 if (i != Stubs.end()) {
1410 Stubs[
Value] = Section.getStubOffset();
1415 Section.getAddressWithOffset(Section.getStubOffset()), AbiVariant);
1418 RelocationEntry REHi(SectionID, StubTargetAddr - Section.getAddress(),
1419 ELF::R_MIPS_HI16,
Value.Addend);
1421 StubTargetAddr - Section.getAddress() + 4,
1422 ELF::R_MIPS_LO16,
Value.Addend);
1424 if (
Value.SymbolName) {
1434 Section.advanceStubOffset(getMaxStubSize());
1436 }
else if (RelType == ELF::R_MIPS_HI16 || RelType == ELF::R_MIPS_PCHI16) {
1437 int64_t Addend = (Opcode & 0x0000ffff) << 16;
1439 PendingRelocs.push_back(std::make_pair(
Value, RE));
1440 }
else if (RelType == ELF::R_MIPS_LO16 || RelType == ELF::R_MIPS_PCLO16) {
1441 int64_t Addend =
Value.Addend + SignExtend32<16>(Opcode & 0x0000ffff);
1442 for (
auto I = PendingRelocs.begin();
I != PendingRelocs.end();) {
1445 if (MatchingValue ==
Value &&
1446 RelType == getMatchingLoRelocation(Reloc.
RelType) &&
1449 if (
Value.SymbolName)
1453 I = PendingRelocs.erase(
I);
1458 if (
Value.SymbolName)
1463 if (RelType == ELF::R_MIPS_32)
1464 Value.Addend += Opcode;
1465 else if (RelType == ELF::R_MIPS_PC16)
1466 Value.Addend += SignExtend32<18>((Opcode & 0x0000ffff) << 2);
1467 else if (RelType == ELF::R_MIPS_PC19_S2)
1468 Value.Addend += SignExtend32<21>((Opcode & 0x0007ffff) << 2);
1469 else if (RelType == ELF::R_MIPS_PC21_S2)
1470 Value.Addend += SignExtend32<23>((Opcode & 0x001fffff) << 2);
1471 else if (RelType == ELF::R_MIPS_PC26_S2)
1472 Value.Addend += SignExtend32<28>((Opcode & 0x03ffffff) << 2);
1473 processSimpleRelocation(SectionID,
Offset, RelType,
Value);
1478 if (r_type == ELF::R_MIPS_CALL16 || r_type == ELF::R_MIPS_GOT_PAGE
1479 || r_type == ELF::R_MIPS_GOT_DISP) {
1481 if (i != GOTSymbolOffsets.
end())
1485 GOTSymbolOffsets[TargetName] = RE.
SymOffset;
1487 if (
Value.SymbolName)
1491 }
else if (RelType == ELF::R_MIPS_26) {
1497 StubMap::const_iterator i = Stubs.find(
Value);
1498 if (i != Stubs.end()) {
1505 Stubs[
Value] = Section.getStubOffset();
1510 Section.getAddressWithOffset(Section.getStubOffset()), AbiVariant);
1514 RelocationEntry REHi(SectionID, StubTargetAddr - Section.getAddress(),
1515 ELF::R_MIPS_HI16,
Value.Addend);
1517 StubTargetAddr - Section.getAddress() + 4,
1518 ELF::R_MIPS_LO16,
Value.Addend);
1519 if (
Value.SymbolName) {
1530 StubTargetAddr - Section.getAddress(),
1531 ELF::R_MIPS_HIGHEST,
Value.Addend);
1533 StubTargetAddr - Section.getAddress() + 4,
1534 ELF::R_MIPS_HIGHER,
Value.Addend);
1536 StubTargetAddr - Section.getAddress() + 12,
1537 ELF::R_MIPS_HI16,
Value.Addend);
1539 StubTargetAddr - Section.getAddress() + 20,
1540 ELF::R_MIPS_LO16,
Value.Addend);
1541 if (
Value.SymbolName) {
1555 Section.advanceStubOffset(getMaxStubSize());
1558 processSimpleRelocation(SectionID,
Offset, RelType,
Value);
1562 if (RelType == ELF::R_PPC64_REL24) {
1571 uint8_t *
Target = Section.getAddressWithOffset(
Offset);
1572 bool RangeOverflow =
false;
1575 if (AbiVariant != 2) {
1579 if (
auto Err = findOPDEntrySection(Obj, ObjSectionToID,
Value))
1580 return std::move(Err);
1584 if (
Value.SectionID == SectionID){
1585 uint8_t SymOther = Symbol->getOther();
1589 uint8_t *RelocTarget =
1591 int64_t delta =
static_cast<int64_t
>(
Target - RelocTarget);
1593 if (SignExtend64<26>(delta) != delta) {
1594 RangeOverflow =
true;
1595 }
else if ((AbiVariant != 2) ||
1596 (AbiVariant == 2 &&
Value.SectionID == SectionID)) {
1601 if (IsExtern || (AbiVariant == 2 &&
Value.SectionID != SectionID) ||
1605 StubMap::const_iterator i = Stubs.find(
Value);
1606 if (i != Stubs.end()) {
1608 resolveRelocation(Section,
Offset,
1609 Section.getLoadAddressWithOffset(i->second),
1615 Stubs[
Value] = Section.getStubOffset();
1617 Section.getAddressWithOffset(Section.getStubOffset()),
1620 ELF::R_PPC64_ADDR64,
Value.Addend);
1626 uint64_t StubRelocOffset = StubTargetAddr - Section.getAddress();
1628 StubRelocOffset += 2;
1631 ELF::R_PPC64_ADDR16_HIGHEST,
Value.Addend);
1633 ELF::R_PPC64_ADDR16_HIGHER,
Value.Addend);
1635 ELF::R_PPC64_ADDR16_HI,
Value.Addend);
1637 ELF::R_PPC64_ADDR16_LO,
Value.Addend);
1639 if (
Value.SymbolName) {
1653 Section.getLoadAddressWithOffset(Section.getStubOffset()),
1655 Section.advanceStubOffset(getMaxStubSize());
1657 if (IsExtern || (AbiVariant == 2 &&
Value.SectionID != SectionID)) {
1659 if (AbiVariant == 2)
1665 }
else if (RelType == ELF::R_PPC64_TOC16 ||
1666 RelType == ELF::R_PPC64_TOC16_DS ||
1667 RelType == ELF::R_PPC64_TOC16_LO ||
1668 RelType == ELF::R_PPC64_TOC16_LO_DS ||
1669 RelType == ELF::R_PPC64_TOC16_HI ||
1670 RelType == ELF::R_PPC64_TOC16_HA) {
1682 case ELF::R_PPC64_TOC16: RelType = ELF::R_PPC64_ADDR16;
break;
1683 case ELF::R_PPC64_TOC16_DS: RelType = ELF::R_PPC64_ADDR16_DS;
break;
1684 case ELF::R_PPC64_TOC16_LO: RelType = ELF::R_PPC64_ADDR16_LO;
break;
1685 case ELF::R_PPC64_TOC16_LO_DS: RelType = ELF::R_PPC64_ADDR16_LO_DS;
break;
1686 case ELF::R_PPC64_TOC16_HI: RelType = ELF::R_PPC64_ADDR16_HI;
break;
1687 case ELF::R_PPC64_TOC16_HA: RelType = ELF::R_PPC64_ADDR16_HA;
break;
1692 if (
auto Err = findPPC64TOCSection(Obj, ObjSectionToID, TOCValue))
1693 return std::move(Err);
1703 if (RelType == ELF::R_PPC64_TOC) {
1704 RelType = ELF::R_PPC64_ADDR64;
1705 if (
auto Err = findPPC64TOCSection(Obj, ObjSectionToID,
Value))
1706 return std::move(Err);
1707 }
else if (TargetName ==
".TOC.") {
1708 if (
auto Err = findPPC64TOCSection(Obj, ObjSectionToID,
Value))
1709 return std::move(Err);
1710 Value.Addend += Addend;
1715 if (
Value.SymbolName)
1721 (RelType == ELF::R_390_PLT32DBL || RelType == ELF::R_390_GOTENT)) {
1731 LLVM_DEBUG(
dbgs() <<
"\t\tThis is a SystemZ indirect relocation.");
1735 StubMap::const_iterator i = Stubs.find(
Value);
1736 uintptr_t StubAddress;
1737 if (i != Stubs.end()) {
1738 StubAddress = uintptr_t(Section.getAddressWithOffset(i->second));
1744 uintptr_t BaseAddress = uintptr_t(Section.getAddress());
1746 alignTo(BaseAddress + Section.getStubOffset(), getStubAlignment());
1747 unsigned StubOffset = StubAddress - BaseAddress;
1749 Stubs[
Value] = StubOffset;
1753 if (
Value.SymbolName)
1757 Section.advanceStubOffset(getMaxStubSize());
1760 if (RelType == ELF::R_390_GOTENT)
1761 resolveRelocation(Section,
Offset, StubAddress + 8, ELF::R_390_PC32DBL,
1764 resolveRelocation(Section,
Offset, StubAddress, RelType, Addend);
1766 if (RelType == ELF::R_X86_64_PLT32) {
1787 StubMap::const_iterator i = Stubs.find(
Value);
1788 uintptr_t StubAddress;
1789 if (i != Stubs.end()) {
1790 StubAddress = uintptr_t(Section->getAddress()) + i->second;
1796 uintptr_t BaseAddress = uintptr_t(Section->getAddress());
1797 StubAddress =
alignTo(BaseAddress + Section->getStubOffset(),
1798 getStubAlignment());
1799 unsigned StubOffset = StubAddress - BaseAddress;
1800 Stubs[
Value] = StubOffset;
1804 Section->advanceStubOffset(getMaxStubSize());
1807 uint64_t GOTOffset = allocateGOTEntries(1);
1813 resolveGOTOffsetRelocation(SectionID, StubOffset + 2, GOTOffset - 4,
1814 ELF::R_X86_64_PC32);
1818 computeGOTOffsetRE(GOTOffset, 0, ELF::R_X86_64_64),
1823 resolveRelocation(*Section,
Offset, StubAddress, ELF::R_X86_64_PC32,
1827 computePlaceholderAddress(SectionID,
Offset));
1828 processSimpleRelocation(SectionID,
Offset, ELF::R_X86_64_PC32,
Value);
1830 }
else if (RelType == ELF::R_X86_64_GOTPCREL ||
1831 RelType == ELF::R_X86_64_GOTPCRELX ||
1832 RelType == ELF::R_X86_64_REX_GOTPCRELX) {
1833 uint64_t GOTOffset = allocateGOTEntries(1);
1834 resolveGOTOffsetRelocation(SectionID,
Offset, GOTOffset + Addend,
1835 ELF::R_X86_64_PC32);
1839 computeGOTOffsetRE(GOTOffset,
Value.Offset, ELF::R_X86_64_64);
1840 if (
Value.SymbolName)
1844 }
else if (RelType == ELF::R_X86_64_GOT64) {
1846 uint64_t GOTOffset = allocateGOTEntries(1);
1848 ELF::R_X86_64_64, 0);
1852 computeGOTOffsetRE(GOTOffset,
Value.Offset, ELF::R_X86_64_64);
1853 if (
Value.SymbolName)
1857 }
else if (RelType == ELF::R_X86_64_GOTPC32) {
1861 (void)allocateGOTEntries(0);
1862 resolveGOTOffsetRelocation(SectionID,
Offset, Addend, ELF::R_X86_64_PC32);
1863 }
else if (RelType == ELF::R_X86_64_GOTPC64) {
1864 (void)allocateGOTEntries(0);
1865 resolveGOTOffsetRelocation(SectionID,
Offset, Addend, ELF::R_X86_64_PC64);
1866 }
else if (RelType == ELF::R_X86_64_GOTOFF64) {
1868 (void)allocateGOTEntries(0);
1869 processSimpleRelocation(SectionID,
Offset, RelType,
Value);
1870 }
else if (RelType == ELF::R_X86_64_PC32) {
1872 processSimpleRelocation(SectionID,
Offset, RelType,
Value);
1873 }
else if (RelType == ELF::R_X86_64_PC64) {
1875 processSimpleRelocation(SectionID,
Offset, RelType,
Value);
1876 }
else if (RelType == ELF::R_X86_64_GOTTPOFF) {
1877 processX86_64GOTTPOFFRelocation(SectionID,
Offset,
Value, Addend);
1878 }
else if (RelType == ELF::R_X86_64_TLSGD ||
1879 RelType == ELF::R_X86_64_TLSLD) {
1882 auto &GetAddrRelocation = *RelI;
1883 processX86_64TLSRelocation(SectionID,
Offset, RelType,
Value, Addend,
1886 processSimpleRelocation(SectionID,
Offset, RelType,
Value);
1892 processSimpleRelocation(SectionID,
Offset, RelType,
Value);
1897void RuntimeDyldELF::processX86_64GOTTPOFFRelocation(
unsigned SectionID,
1908 struct CodeSequence {
1920 std::array<CodeSequence, 2> CodeSequences;
1924 static const std::initializer_list<uint8_t> ExpectedCodeSequenceList = {
1925 0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00,
1927 0x48, 0x03, 0x05, 0x00, 0x00, 0x00, 0x00
1930 CodeSequences[0].ExpectedCodeSequence =
1932 CodeSequences[0].TLSSequenceOffset = 12;
1934 static const std::initializer_list<uint8_t> NewCodeSequenceList = {
1935 0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00,
1936 0x48, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00
1939 CodeSequences[0].TpoffRelocationOffset = 12;
1944 static const std::initializer_list<uint8_t> ExpectedCodeSequenceList = {
1945 0x48, 0x8b, 0x05, 0x00, 0x00, 0x00, 0x00,
1946 0x64, 0x48, 0x8b, 0x00, 0x00, 0x00, 0x00
1948 CodeSequences[1].ExpectedCodeSequence =
1950 CodeSequences[1].TLSSequenceOffset = 3;
1952 static const std::initializer_list<uint8_t> NewCodeSequenceList = {
1953 0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00,
1954 0x64, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00,
1957 CodeSequences[1].TpoffRelocationOffset = 10;
1962 for (
const auto &
C : CodeSequences) {
1963 assert(
C.ExpectedCodeSequence.size() ==
C.NewCodeSequence.size() &&
1964 "Old and new code sequences must have the same size");
1966 if (
Offset <
C.TLSSequenceOffset ||
1967 (
Offset -
C.TLSSequenceOffset +
C.NewCodeSequence.size()) >
1974 auto TLSSequenceStartOffset =
Offset -
C.TLSSequenceOffset;
1975 auto *TLSSequence =
Section.getAddressWithOffset(TLSSequenceStartOffset);
1977 C.ExpectedCodeSequence) {
1981 memcpy(TLSSequence,
C.NewCodeSequence.data(),
C.NewCodeSequence.size());
1988 TLSSequenceStartOffset +
C.TpoffRelocationOffset,
1989 ELF::R_X86_64_TPOFF32,
Value.Addend - Addend);
1991 if (
Value.SymbolName)
2004 uint64_t GOTOffset = allocateGOTEntries(1);
2005 resolveGOTOffsetRelocation(SectionID,
Offset, GOTOffset + Addend,
2006 ELF::R_X86_64_PC32);
2008 computeGOTOffsetRE(GOTOffset,
Value.Offset, ELF::R_X86_64_TPOFF64);
2009 if (
Value.SymbolName)
2016void RuntimeDyldELF::processX86_64TLSRelocation(
2029 bool IsSmallCodeModel;
2031 bool IsGOTPCRel =
false;
2033 switch (GetAddrRelocation.
getType()) {
2034 case ELF::R_X86_64_GOTPCREL:
2035 case ELF::R_X86_64_REX_GOTPCRELX:
2036 case ELF::R_X86_64_GOTPCRELX:
2039 case ELF::R_X86_64_PLT32:
2040 IsSmallCodeModel =
true;
2042 case ELF::R_X86_64_PLTOFF64:
2043 IsSmallCodeModel =
false;
2047 "invalid TLS relocations for General/Local Dynamic TLS Model: "
2048 "expected PLT or GOT relocation for __tls_get_addr function");
2059 if (RelType == ELF::R_X86_64_TLSGD) {
2064 if (IsSmallCodeModel) {
2066 static const std::initializer_list<uint8_t> CodeSequence = {
2068 0x48, 0x8d, 0x3d, 0x00, 0x00,
2072 0xe8, 0x00, 0x00, 0x00, 0x00
2075 TLSSequenceOffset = 4;
2079 static const std::initializer_list<uint8_t> CodeSequence = {
2081 0x48, 0x8d, 0x3d, 0x00, 0x00,
2085 0xff, 0x15, 0x00, 0x00, 0x00,
2089 TLSSequenceOffset = 4;
2094 static const std::initializer_list<uint8_t> SmallSequence = {
2095 0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00,
2097 0x48, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00
2101 TpoffRelocOffset = 12;
2103 static const std::initializer_list<uint8_t> CodeSequence = {
2104 0x48, 0x8d, 0x3d, 0x00, 0x00, 0x00, 0x00,
2106 0x48, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2112 TLSSequenceOffset = 3;
2115 static const std::initializer_list<uint8_t> LargeSequence = {
2116 0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00,
2118 0x48, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00,
2120 0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00
2123 TpoffRelocOffset = 12;
2130 ELF::R_X86_64_TPOFF32,
Value.Addend - Addend);
2131 if (
Value.SymbolName)
2135 }
else if (RelType == ELF::R_X86_64_TLSLD) {
2136 if (IsSmallCodeModel) {
2138 static const std::initializer_list<uint8_t> CodeSequence = {
2139 0x48, 0x8d, 0x3d, 0x00, 0x00, 0x00,
2140 0x00, 0xe8, 0x00, 0x00, 0x00, 0x00
2143 TLSSequenceOffset = 3;
2146 static const std::initializer_list<uint8_t> SmallSequence = {
2148 0x64, 0x48, 0x8b, 0x04, 0x25,
2149 0x00, 0x00, 0x00, 0x00
2155 static const std::initializer_list<uint8_t> CodeSequence = {
2156 0x48, 0x8d, 0x3d, 0x00,
2158 0xff, 0x15, 0x00, 0x00,
2163 TLSSequenceOffset = 3;
2167 static const std::initializer_list<uint8_t> SmallSequence = {
2168 0x0f, 0x1f, 0x40, 0x00,
2169 0x64, 0x48, 0x8b, 0x04, 0x25,
2170 0x00, 0x00, 0x00, 0x00
2177 static const std::initializer_list<uint8_t> CodeSequence = {
2178 0x48, 0x8d, 0x3d, 0x00, 0x00, 0x00, 0x00,
2180 0x48, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
2186 TLSSequenceOffset = 3;
2189 static const std::initializer_list<uint8_t> LargeSequence = {
2191 0x66, 0x66, 0x0f, 0x1f, 0x84, 0x00, 0x00, 0x00, 0x00,
2193 0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00
2202 "Old and new code sequences must have the same size");
2205 if (
Offset < TLSSequenceOffset ||
2206 (
Offset - TLSSequenceOffset + NewCodeSequence.
size()) >
2211 auto *TLSSequence =
Section.getAddressWithOffset(
Offset - TLSSequenceOffset);
2213 ExpectedCodeSequence) {
2215 "invalid TLS sequence for Global/Local Dynamic TLS Model");
2218 memcpy(TLSSequence, NewCodeSequence.
data(), NewCodeSequence.
size());
2256uint64_t RuntimeDyldELF::allocateGOTEntries(
unsigned no) {
2257 if (GOTSectionID == 0) {
2264 CurrentGOTIndex += no;
2269 unsigned GOTRelType) {
2270 auto E = GOTOffsetMap.insert({
Value, 0});
2272 uint64_t GOTOffset = allocateGOTEntries(1);
2276 computeGOTOffsetRE(GOTOffset,
Value.Offset, GOTRelType);
2277 if (
Value.SymbolName)
2282 E.first->second = GOTOffset;
2285 return E.first->second;
2288void RuntimeDyldELF::resolveGOTOffsetRelocation(
unsigned SectionID,
2308 if (ObjSymbolFlags & SymbolRef::SF_Indirect) {
2309 if (IFuncStubSectionID == 0) {
2312 IFuncStubSectionID =
Sections.size();
2314 SectionEntry(
".text.__llvm_IFuncStubs",
nullptr, 0, 0, 0));
2316 IFuncStubOffset = 64;
2324 IFuncStubOffset += getMaxIFuncStubSize();
2331 if (!PendingRelocs.empty())
2332 return make_error<RuntimeDyldError>(
"Can't find matching LO16 reloc");
2336 if (IFuncStubSectionID != 0) {
2338 IFuncStubOffset, 1, IFuncStubSectionID,
".text.__llvm_IFuncStubs");
2339 if (!IFuncStubsAddr)
2340 return make_error<RuntimeDyldError>(
2341 "Unable to allocate memory for IFunc stubs!");
2343 SectionEntry(
".text.__llvm_IFuncStubs", IFuncStubsAddr, IFuncStubOffset,
2344 IFuncStubOffset, 0);
2346 createIFuncResolver(IFuncStubsAddr);
2349 << IFuncStubSectionID <<
" Addr: "
2350 <<
Sections[IFuncStubSectionID].getAddress() <<
'\n');
2351 for (
auto &IFuncStub : IFuncStubs) {
2352 auto &Symbol = IFuncStub.OriginalSymbol;
2354 <<
" Offset: " <<
format(
"%p", Symbol.getOffset())
2355 <<
" IFuncStubOffset: "
2356 <<
format(
"%p\n", IFuncStub.StubOffset));
2357 createIFuncStub(IFuncStubSectionID, 0, IFuncStub.StubOffset,
2358 Symbol.getSectionID(), Symbol.getOffset());
2361 IFuncStubSectionID = 0;
2362 IFuncStubOffset = 0;
2367 if (GOTSectionID != 0) {
2371 GOTSectionID,
".got",
false);
2373 return make_error<RuntimeDyldError>(
"Unable to allocate memory for GOT!");
2380 memset(
Addr, 0, TotalSize);
2386 if (SI->relocation_begin() != SI->relocation_end()) {
2389 return make_error<RuntimeDyldError>(
2393 ObjSectionToIDMap::iterator i = SectionMap.find(*RelocatedSection);
2394 assert(i != SectionMap.end());
2398 GOTSymbolOffsets.
clear();
2403 ObjSectionToIDMap::iterator i, e;
2404 for (i = SectionMap.begin(), e = SectionMap.end(); i != e; ++i) {
2414 if (
Name ==
".eh_frame") {
2415 UnregisteredEHFrameSections.
push_back(i->second);
2420 GOTOffsetMap.clear();
2422 CurrentGOTIndex = 0;
2431void RuntimeDyldELF::createIFuncResolver(uint8_t *
Addr)
const {
2443 const uint8_t StubCode[] = {
2451 0x41, 0xff, 0x53, 0x08,
2463 static_assert(
sizeof(StubCode) <= 64,
2464 "maximum size of the IFunc resolver is 64B");
2465 memcpy(
Addr, StubCode,
sizeof(StubCode));
2468 "IFunc resolver is not supported for target architecture");
2472void RuntimeDyldELF::createIFuncStub(
unsigned IFuncStubSectionID,
2475 unsigned IFuncSectionID,
2477 auto &IFuncStubSection =
Sections[IFuncStubSectionID];
2478 auto *
Addr = IFuncStubSection.getAddressWithOffset(IFuncStubOffset);
2502 uint64_t GOT1 = allocateGOTEntries(2);
2506 IFuncResolverOffset, {});
2508 RelocationEntry RE2(GOTSectionID, GOT2, ELF::R_X86_64_64, IFuncOffset, {});
2511 const uint8_t StubCode[] = {
2512 0x4c, 0x8d, 0x1d, 0x00, 0x00, 0x00, 0x00,
2515 assert(
sizeof(StubCode) <= getMaxIFuncStubSize() &&
2516 "IFunc stub size must not exceed getMaxIFuncStubSize()");
2517 memcpy(
Addr, StubCode,
sizeof(StubCode));
2521 resolveGOTOffsetRelocation(IFuncStubSectionID, IFuncStubOffset + 3,
2522 GOT1 - 4, ELF::R_X86_64_PC32);
2528unsigned RuntimeDyldELF::getMaxIFuncStubSize()
const {
2535bool RuntimeDyldELF::relocationNeedsGot(
const RelocationRef &R)
const {
2536 unsigned RelTy =
R.getType();
2538 return RelTy == ELF::R_AARCH64_ADR_GOT_PAGE ||
2539 RelTy == ELF::R_AARCH64_LD64_GOT_LO12_NC;
2542 return RelTy == ELF::R_X86_64_GOTPCREL ||
2543 RelTy == ELF::R_X86_64_GOTPCRELX ||
2544 RelTy == ELF::R_X86_64_GOT64 ||
2545 RelTy == ELF::R_X86_64_REX_GOTPCRELX;
2549bool RuntimeDyldELF::relocationNeedsStub(
const RelocationRef &R)
const {
2553 switch (
R.getType()) {
2558 case ELF::R_X86_64_GOTPCREL:
2559 case ELF::R_X86_64_GOTPCRELX:
2560 case ELF::R_X86_64_REX_GOTPCRELX:
2561 case ELF::R_X86_64_GOTPC64:
2562 case ELF::R_X86_64_GOT64:
2563 case ELF::R_X86_64_GOTOFF64:
2564 case ELF::R_X86_64_PC32:
2565 case ELF::R_X86_64_PC64:
2566 case ELF::R_X86_64_64:
amdgpu aa AMDGPU Address space based Alias Analysis Wrapper
Given that RA is a live value
#define LLVM_ELF_IMPORT_TYPES_ELFT(ELFT)
static void or32le(void *P, int32_t V)
static void or32AArch64Imm(void *L, uint64_t Imm)
static void write(bool isBE, void *P, T V)
static uint64_t getBits(uint64_t Val, int Start, int End)
static void write32AArch64Addr(void *L, uint64_t Imm)
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
size_t size() const
size - Get the array size.
Lightweight error class with error context and mandatory checking.
static ErrorSuccess success()
Create a success value.
Tagged union holding either a T or a Error.
Error takeError()
Take ownership of the stored error.
Symbol resolution interface.
static std::unique_ptr< MemoryBuffer > getMemBufferCopy(StringRef InputData, const Twine &BufferName="")
Open the specified memory range as a MemoryBuffer, copying the contents and taking ownership of it.
RelocationEntry - used to represent relocations internally in the dynamic linker.
uint32_t RelType
RelType - relocation type.
uint64_t Offset
Offset - offset into the section.
int64_t Addend
Addend - the relocation addend encoded in the instruction itself.
unsigned SectionID
SectionID - the section this relocation points to.
Interface for looking up the initializer for a variable name, used by Init::resolveReferences.
void registerEHFrames() override
size_t getGOTEntrySize() override
~RuntimeDyldELF() override
static std::unique_ptr< RuntimeDyldELF > create(Triple::ArchType Arch, RuntimeDyld::MemoryManager &MemMgr, JITSymbolResolver &Resolver)
Error finalizeLoad(const ObjectFile &Obj, ObjSectionToIDMap &SectionMap) override
DenseMap< SID, SID > SectionToGOTMap
bool isCompatibleFile(const object::ObjectFile &Obj) const override
std::unique_ptr< RuntimeDyld::LoadedObjectInfo > loadObject(const object::ObjectFile &O) override
RuntimeDyldELF(RuntimeDyld::MemoryManager &MemMgr, JITSymbolResolver &Resolver)
Expected< relocation_iterator > processRelocationRef(unsigned SectionID, relocation_iterator RelI, const ObjectFile &Obj, ObjSectionToIDMap &ObjSectionToID, StubMap &Stubs) override
Parses one or more object file relocations (some object files use relocation pairs) and stores it to ...
std::map< SectionRef, unsigned > ObjSectionToIDMap
void writeInt32BE(uint8_t *Addr, uint32_t Value)
void writeInt64BE(uint8_t *Addr, uint64_t Value)
std::map< RelocationValueRef, uintptr_t > StubMap
void writeInt16BE(uint8_t *Addr, uint16_t Value)
void addRelocationForSymbol(const RelocationEntry &RE, StringRef SymbolName)
bool IsTargetLittleEndian
RuntimeDyld::MemoryManager & MemMgr
void addRelocationForSection(const RelocationEntry &RE, unsigned SectionID)
Expected< unsigned > findOrEmitSection(const ObjectFile &Obj, const SectionRef &Section, bool IsCode, ObjSectionToIDMap &LocalSections)
Find Section in LocalSections.
uint8_t * createStubFunction(uint8_t *Addr, unsigned AbiVariant=0)
Emits long jump instruction to Addr.
uint64_t readBytesUnaligned(uint8_t *Src, unsigned Size) const
Endian-aware read Read the least significant Size bytes from Src.
RTDyldSymbolTable GlobalSymbolTable
Expected< ObjSectionToIDMap > loadObjectImpl(const object::ObjectFile &Obj)
virtual uint8_t * allocateDataSection(uintptr_t Size, unsigned Alignment, unsigned SectionID, StringRef SectionName, bool IsReadOnly)=0
Allocate a memory block of (at least) the given size suitable for data.
virtual uint8_t * allocateCodeSection(uintptr_t Size, unsigned Alignment, unsigned SectionID, StringRef SectionName)=0
Allocate a memory block of (at least) the given size suitable for executable code.
virtual void registerEHFrames(uint8_t *Addr, uint64_t LoadAddr, size_t Size)=0
Register the EH frames with the runtime so that c++ exceptions work.
virtual bool allowStubAllocation() const
Override to return false to tell LLVM no stub space will be needed.
SectionEntry - represents a section emitted into memory by the dynamic linker.
void push_back(const T &Elt)
iterator find(StringRef Key)
StringRef - Represent a constant reference to a string, i.e.
constexpr const char * data() const
data - Get a pointer to the start of the string (which may not be null terminated).
Symbol info for RuntimeDyld.
Target - Wrapper for Target specific information.
static StringRef getArchTypePrefix(ArchType Kind)
Get the "prefix" canonical name for the Kind architecture.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
LLVM Value Representation.
Expected< uint32_t > getFlags() const
Get symbol flags (bitwise OR of SymbolRef::Flags)
DataRefImpl getRawDataRefImpl() const
StringRef getData() const
bool isLittleEndian() const
StringRef getFileName() const
virtual unsigned getPlatformFlags() const =0
Returns platform-specific object flags, if any.
static bool classof(const Binary *v)
Expected< const Elf_Sym * > getSymbol(DataRefImpl Sym) const
static Expected< ELFObjectFile< ELFT > > create(MemoryBufferRef Object, bool InitContent=true)
Expected< int64_t > getAddend() const
This class is the base class for all object file types.
virtual section_iterator section_end() const =0
virtual uint8_t getBytesInAddress() const =0
The number of bytes used to represent an address in this object file format.
section_iterator_range sections() const
virtual StringRef getFileFormatName() const =0
virtual section_iterator section_begin() const =0
This is a value type class that represents a single relocation in the list of relocations in the obje...
This is a value type class that represents a single section in the list of sections in the object fil...
DataRefImpl getRawDataRefImpl() const
bool isText() const
Whether this section contains instructions.
Expected< StringRef > getName() const
This is a value type class that represents a single symbol in the list of symbols in the object file.
Expected< section_iterator > getSection() const
Get section this symbol is defined in reference to.
virtual basic_symbol_iterator symbol_end() const =0
A raw_ostream that writes to an std::string.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
@ C
The default llvm calling convention, compatible with C.
static int64_t decodePPC64LocalEntryOffset(unsigned Other)
std::optional< const char * > toString(const std::optional< DWARFFormValue > &V)
Take an optional DWARFFormValue and try to extract a string value from it.
@ Resolved
Queried, materialization begun.
void write32le(void *P, uint32_t V)
uint32_t read32le(const void *P)
This is an optimization pass for GlobalISel generic memory operations.
void logAllUnhandledErrors(Error E, raw_ostream &OS, Twine ErrorBanner={})
Log all errors (if any) in E to OS.
static uint16_t applyPPChighera(uint64_t value)
static uint16_t applyPPChi(uint64_t value)
void handleAllErrors(Error E, HandlerTs &&... Handlers)
Behaves the same as handleErrors, except that by contract all errors must be handled by the given han...
static uint16_t applyPPChighesta(uint64_t value)
static uint16_t applyPPChighest(uint64_t value)
Error write(MCStreamer &Out, ArrayRef< std::string > Inputs, OnCuIndexOverflow OverflowOptValue)
static uint16_t applyPPCha(uint64_t value)
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
static uint16_t applyPPClo(uint64_t value)
format_object< Ts... > format(const char *Fmt, const Ts &... Vals)
These are helper functions used to produce formatted output.
void cantFail(Error Err, const char *Msg=nullptr)
Report a fatal error if Err is a failure value.
static uint16_t applyPPChigher(uint64_t value)
uint64_t alignTo(uint64_t Size, Align A)
Returns a multiple of A needed to store Size bytes.
static void or32le(void *P, int32_t V)
OutputIt move(R &&Range, OutputIt Out)
Provide wrappers to std::move which take ranges instead of having to pass begin/end explicitly.
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
constexpr int64_t SignExtend64(uint64_t x)
Sign-extend the number in the bottom B bits of X to a 64-bit integer.
void consumeError(Error Err)
Consume a Error without doing anything.
static void write32AArch64Addr(void *T, uint64_t s, uint64_t p, int shift)
Implement std::hash so that hash_code can be used in STL containers.
SymInfo contains information about symbol: it's address and section index which is -1LL for absolute ...