#define DEBUG_TYPE "dyld"

  or32le(L, (Imm & 0xFFF) << 10);

template <class T> static void write(bool isBE, void *P, T V) {
  isBE ? write<T, llvm::endianness::big>(P, V)
       : write<T, llvm::endianness::little>(P, V);

  uint32_t ImmHi = (Imm & 0x1FFFFC) << 3;
  uint64_t Mask = (0x3 << 29) | (0x1FFFFC << 3);

  return (Val >> Start) & Mask;
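// The helpers above patch AArch64 instruction immediates in place:
// or32AArch64Imm ORs a 12-bit value into the imm12 field (bits 10-21) of an
// ADD/LDST instruction, write32AArch64Addr splits a 21-bit page offset into
// the ADRP immlo (bits 29-30) and immhi (bits 5-23) fields, and
// getBits(Val, Start, End) extracts the bit range [Start, End] from Val.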
template <class ELFT> class DyldELFObject : public ELFObjectFile<ELFT> {
  typedef typename ELFT::uint addr_type;

    return v->isDyldType();

  this->isDyldELFObject = true;

  if (auto E = Obj.takeError())

  std::unique_ptr<DyldELFObject<ELFT>> Ret(
      new DyldELFObject<ELFT>(std::move(*Obj)));
  return std::move(Ret);
void DyldELFObject<ELFT>::updateSectionAddress(const SectionRef &Sec,

      const_cast<Elf_Shdr *>(reinterpret_cast<const Elf_Shdr *>(ShdrRef.p));
  shdr->sh_addr = static_cast<addr_type>(Addr);

void DyldELFObject<ELFT>::updateSymbolAddress(const SymbolRef &SymRef,

  Elf_Sym *sym = const_cast<Elf_Sym *>(
  sym->st_value = static_cast<addr_type>(Addr);
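// updateSectionAddress and updateSymbolAddress rewrite sh_addr / st_value in
// the DyldELFObject's copy of the section and symbol tables so that the
// object handed out by getObjectForDebug below reflects the load addresses
// actually chosen by the dynamic loader.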
class LoadedELFObjectInfo final
                                  RuntimeDyld::LoadedObjectInfo> {
  LoadedELFObjectInfo(RuntimeDyldImpl &RTDyld, ObjSectionToIDMap ObjSecToIDMap)

  getObjectForDebug(const ObjectFile &Obj) const override;

template <typename ELFT>
                       const LoadedELFObjectInfo &L) {
  typedef typename ELFT::Shdr Elf_Shdr;
  typedef typename ELFT::uint addr_type;

      DyldELFObject<ELFT>::create(Buffer);
  std::unique_ptr<DyldELFObject<ELFT>> Obj = std::move(*ObjOrErr);
  for (const auto &Sec : Obj->sections()) {
    if (*NameOrErr != "") {
      Elf_Shdr *shdr = const_cast<Elf_Shdr *>(
          reinterpret_cast<const Elf_Shdr *>(ShdrRef.p));

      if (uint64_t SecLoadAddr = L.getSectionLoadAddress(*SI)) {
        shdr->sh_addr = static_cast<addr_type>(SecLoadAddr);

  return std::move(Obj);
createELFDebugObject(const ObjectFile &Obj, const LoadedELFObjectInfo &L) {
  std::unique_ptr<MemoryBuffer> Buffer =

      createRTDyldELFObject<ELF32LE>(Buffer->getMemBufferRef(), Obj, L);
      createRTDyldELFObject<ELF32BE>(Buffer->getMemBufferRef(), Obj, L);
      createRTDyldELFObject<ELF64BE>(Buffer->getMemBufferRef(), Obj, L);
      createRTDyldELFObject<ELF64LE>(Buffer->getMemBufferRef(), Obj, L);

LoadedELFObjectInfo::getObjectForDebug(const ObjectFile &Obj) const {
  return createELFDebugObject(Obj, *this);
  for (SID EHFrameSID : UnregisteredEHFrameSections) {
    uint8_t *EHFrameAddr = Sections[EHFrameSID].getAddress();
    size_t EHFrameSize = Sections[EHFrameSID].getSize();
  UnregisteredEHFrameSections.clear();

std::unique_ptr<RuntimeDyldELF>

std::unique_ptr<RuntimeDyld::LoadedObjectInfo>
  return std::make_unique<LoadedELFObjectInfo>(*this, *ObjSectionToIDOrErr);
void RuntimeDyldELF::resolveX86_64Relocation(const SectionEntry &Section,

  case ELF::R_X86_64_NONE:
  case ELF::R_X86_64_8: {
    uint8_t TruncatedAddr = (Value & 0xFF);
    *Section.getAddressWithOffset(Offset) = TruncatedAddr;
                      << format("%p\n", Section.getAddressWithOffset(Offset)));
  case ELF::R_X86_64_16: {
                      << format("%p\n", Section.getAddressWithOffset(Offset)));
  case ELF::R_X86_64_64: {
  case ELF::R_X86_64_32:
  case ELF::R_X86_64_32S: {
           (Type == ELF::R_X86_64_32S &&
            ((int64_t)Value <= INT32_MAX && (int64_t)Value >= INT32_MIN)));
  case ELF::R_X86_64_PC8: {
    int64_t RealOffset = Value + Addend - FinalAddress;
    assert(isInt<8>(RealOffset));
    int8_t TruncOffset = (RealOffset & 0xFF);
  case ELF::R_X86_64_PC32: {
    int64_t RealOffset = Value + Addend - FinalAddress;
    assert(isInt<32>(RealOffset));
    int32_t TruncOffset = (RealOffset & 0xFFFFFFFF);
  case ELF::R_X86_64_PC64: {
    int64_t RealOffset = Value + Addend - FinalAddress;
                      << format("%p\n", FinalAddress));
  case ELF::R_X86_64_GOTOFF64: {
    for (const auto &Section : Sections) {
      if (Section.getName() == ".got") {
        GOTBase = Section.getLoadAddressWithOffset(0);
    assert(GOTBase != 0 && "missing GOT");
    int64_t GOTOffset = Value - GOTBase + Addend;
  case ELF::R_X86_64_DTPMOD64: {
  case ELF::R_X86_64_DTPOFF64:
  case ELF::R_X86_64_TPOFF64: {
  case ELF::R_X86_64_DTPOFF32:
  case ELF::R_X86_64_TPOFF32: {
    int64_t RealValue = Value + Addend;
    assert(RealValue >= INT32_MIN && RealValue <= INT32_MAX);
    int32_t TruncValue = RealValue;
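// Summary of the X86-64 cases above: the absolute forms (R_X86_64_8/16/32/
// 32S/64) store S + A, i.e. Value + Addend truncated to the field width; the
// PC-relative forms (R_X86_64_PC8/PC32/PC64) store S + A - P, where P is
// FinalAddress, the load address of the fixup itself; R_X86_64_GOTOFF64
// stores the target's offset from the base of the emitted .got section; and
// the TPOFF/DTPOFF cases write thread-pointer-relative offsets for TLS.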
void RuntimeDyldELF::resolveX86Relocation(const SectionEntry &Section,

  case ELF::R_386_32: {
  case ELF::R_386_PLT32:
  case ELF::R_386_PC32: {
void RuntimeDyldELF::resolveAArch64Relocation(const SectionEntry &Section,

                    << " FinalAddress: 0x" << format("%llx", FinalAddress)
                    << " Value: 0x" << format("%llx", Value) << " Type: 0x"
                    << format("%llx", Addend) << "\n");

  case ELF::R_AARCH64_NONE:
  case ELF::R_AARCH64_ABS16: {
           (Result >> 16) == 0);
    write(isBE, TargetPtr, static_cast<uint16_t>(Result & 0xffffU));
  case ELF::R_AARCH64_ABS32: {
           (Result >> 32) == 0);
    write(isBE, TargetPtr, static_cast<uint32_t>(Result & 0xffffffffU));
  case ELF::R_AARCH64_ABS64:
  case ELF::R_AARCH64_PLT32: {
    assert(static_cast<int64_t>(Result) >= INT32_MIN &&
           static_cast<int64_t>(Result) <= INT32_MAX);
  case ELF::R_AARCH64_PREL16: {
    assert(static_cast<int64_t>(Result) >= INT16_MIN &&
           static_cast<int64_t>(Result) <= UINT16_MAX);
    write(isBE, TargetPtr, static_cast<uint16_t>(Result & 0xffffU));
  case ELF::R_AARCH64_PREL32: {
    assert(static_cast<int64_t>(Result) >= INT32_MIN &&
           static_cast<int64_t>(Result) <= UINT32_MAX);
    write(isBE, TargetPtr, static_cast<uint32_t>(Result & 0xffffffffU));
  case ELF::R_AARCH64_PREL64:
    write(isBE, TargetPtr, Value + Addend - FinalAddress);
  case ELF::R_AARCH64_CONDBR19: {
    assert(isInt<21>(BranchImm));
    *TargetPtr &= 0xff00001fU;
    or32le(TargetPtr, (BranchImm & 0x001FFFFC) << 3);
  case ELF::R_AARCH64_TSTBR14: {
    assert(isInt<16>(BranchImm));
    or32le(TargetPtr, (BranchImm & 0x0000FFFC) << 3);
  case ELF::R_AARCH64_CALL26:
  case ELF::R_AARCH64_JUMP26: {
    assert(isInt<28>(BranchImm));
    or32le(TargetPtr, (BranchImm & 0x0FFFFFFC) >> 2);
  case ELF::R_AARCH64_MOVW_UABS_G3:
    or32le(TargetPtr, ((Value + Addend) & 0xFFFF000000000000) >> 43);
  case ELF::R_AARCH64_MOVW_UABS_G2_NC:
    or32le(TargetPtr, ((Value + Addend) & 0xFFFF00000000) >> 27);
  case ELF::R_AARCH64_MOVW_UABS_G1_NC:
    or32le(TargetPtr, ((Value + Addend) & 0xFFFF0000) >> 11);
  case ELF::R_AARCH64_MOVW_UABS_G0_NC:
    or32le(TargetPtr, ((Value + Addend) & 0xFFFF) << 5);
  case ELF::R_AARCH64_ADR_PREL_PG_HI21: {
        ((Value + Addend) & ~0xfffULL) - (FinalAddress & ~0xfffULL);
    assert(isInt<33>(Result) && "overflow check failed for relocation");
  case ELF::R_AARCH64_ADD_ABS_LO12_NC:
  case ELF::R_AARCH64_LDST8_ABS_LO12_NC:
  case ELF::R_AARCH64_LDST16_ABS_LO12_NC:
  case ELF::R_AARCH64_LDST32_ABS_LO12_NC:
  case ELF::R_AARCH64_LDST64_ABS_LO12_NC:
  case ELF::R_AARCH64_LDST128_ABS_LO12_NC:
  case ELF::R_AARCH64_LD_PREL_LO19: {
    assert(isInt<21>(Result));
    *TargetPtr &= 0xff00001fU;
    *TargetPtr |= ((Result & 0xffc) << (5 - 2));
  case ELF::R_AARCH64_ADR_PREL_LO21: {
    assert(isInt<21>(Result));
    *TargetPtr &= 0x9f00001fU;
    *TargetPtr |= ((Result & 0xffc) << (5 - 2));
    *TargetPtr |= (Result & 0x3) << 29;
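// The MOVW_UABS_G* shift amounts above combine two steps: extract the
// relevant 16-bit group of the 64-bit address and slide it into the MOVZ/MOVK
// imm16 field, which occupies bits 5-20 of the instruction.  For example the
// G3 case is ((Addr >> 48) << 5), a net right shift of 43, and G0_NC is just
// (Addr & 0xFFFF) << 5.  The _NC ("no check") variants carry no overflow
// assertion because the other fixups of the four-instruction sequence cover
// the remaining bits.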
void RuntimeDyldELF::resolveARMRelocation(const SectionEntry &Section,

                    << " FinalAddress: " << format("%p", FinalAddress)
                    << " Addend: " << format("%x", Addend) << "\n");

  case ELF::R_ARM_NONE:
  case ELF::R_ARM_PREL31:
                  ((Value - FinalAddress) & ~0x80000000);
  case ELF::R_ARM_TARGET1:
  case ELF::R_ARM_ABS32:
  case ELF::R_ARM_MOVW_ABS_NC:
  case ELF::R_ARM_MOVT_ABS:
    if (Type == ELF::R_ARM_MOVW_ABS_NC)
    else if (Type == ELF::R_ARM_MOVT_ABS)
                            (((Value >> 12) & 0xF) << 16);
  case ELF::R_ARM_PC24:
  case ELF::R_ARM_CALL:
  case ELF::R_ARM_JUMP24:
    int32_t RelValue = static_cast<int32_t>(Value - FinalAddress - 8);
    RelValue = (RelValue & 0x03FFFFFC) >> 2;
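// For the ARM branch cases, subtracting 8 accounts for the ARM-mode PC
// reading as "current instruction + 8"; the byte offset is then reduced to
// the 24-bit word displacement stored in bits 0-23 of the B/BL encoding.
// The MOVW/MOVT cases split a 16-bit half into the imm12 field and the imm4
// field at bits 16-19, which is what the ((Value >> 12) & 0xF) << 16 term
// reconstructs.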
void RuntimeDyldELF::setMipsABI(const ObjectFile &Obj) {

  if (auto *E = dyn_cast<ELFObjectFileBase>(&Obj)) {
    unsigned AbiVariant = E->getPlatformFlags();

                              ObjSectionToIDMap &LocalSections,

  for (auto &Section : Obj.sections()) {
    if (auto SectionIDOrErr =
      return SectionIDOrErr.takeError();
                              ObjSectionToIDMap &LocalSections,

    if (RelSectionName != ".opd")
                           e = si->relocation_end();
      if (TypeFunc != ELF::R_PPC64_ADDR64) {
      uint64_t TargetSymbolOffset = i->getOffset();
      if (auto AddendOrErr = i->getAddend())
        Addend = *AddendOrErr;
        return AddendOrErr.takeError();
      if (TypeTOC != ELF::R_PPC64_TOC)
      if (Rel.Addend != (int64_t)TargetSymbolOffset)
      if (auto TSIOrErr = TargetSymbol->getSection())
        return TSIOrErr.takeError();
      bool IsCode = TSI->isText();
        return SectionIDOrErr.takeError();
      Rel.Addend = (intptr_t)Addend;
static uint16_t applyPPChi(uint64_t value) { return (value >> 16) & 0xffff; }

static uint16_t applyPPCha(uint64_t value) {
  return ((value + 0x8000) >> 16) & 0xffff;
}

static uint16_t applyPPChigher(uint64_t value) {
  return (value >> 32) & 0xffff;
}

static uint16_t applyPPChighera(uint64_t value) {
  return ((value + 0x8000) >> 32) & 0xffff;
}

static uint16_t applyPPChighest(uint64_t value) {
  return (value >> 48) & 0xffff;
}

static uint16_t applyPPChighesta(uint64_t value) {
  return ((value + 0x8000) >> 48) & 0xffff;
}
void RuntimeDyldELF::resolvePPC32Relocation(const SectionEntry &Section,

  uint8_t *LocalAddress = Section.getAddressWithOffset(Offset);

  case ELF::R_PPC_ADDR16_LO:
  case ELF::R_PPC_ADDR16_HI:
  case ELF::R_PPC_ADDR16_HA:
void RuntimeDyldELF::resolvePPC64Relocation(const SectionEntry &Section,

  uint8_t *LocalAddress = Section.getAddressWithOffset(Offset);

  case ELF::R_PPC64_ADDR16:
  case ELF::R_PPC64_ADDR16_DS:
  case ELF::R_PPC64_ADDR16_LO:
  case ELF::R_PPC64_ADDR16_LO_DS:
  case ELF::R_PPC64_ADDR16_HI:
  case ELF::R_PPC64_ADDR16_HIGH:
  case ELF::R_PPC64_ADDR16_HA:
  case ELF::R_PPC64_ADDR16_HIGHA:
  case ELF::R_PPC64_ADDR16_HIGHER:
  case ELF::R_PPC64_ADDR16_HIGHERA:
  case ELF::R_PPC64_ADDR16_HIGHEST:
  case ELF::R_PPC64_ADDR16_HIGHESTA:
  case ELF::R_PPC64_ADDR14: {
    uint8_t aalk = *(LocalAddress + 3);
  case ELF::R_PPC64_REL16_LO: {
  case ELF::R_PPC64_REL16_HI: {
  case ELF::R_PPC64_REL16_HA: {
  case ELF::R_PPC64_ADDR32: {
    int64_t Result = static_cast<int64_t>(Value + Addend);
    if (SignExtend64<32>(Result) != Result)
  case ELF::R_PPC64_REL24: {
    int64_t delta = static_cast<int64_t>(Value - FinalAddress + Addend);
    if (SignExtend64<26>(delta) != delta)
    writeInt32BE(LocalAddress, (Inst & 0xFC000003) | (delta & 0x03FFFFFC));
  case ELF::R_PPC64_REL32: {
    int64_t delta = static_cast<int64_t>(Value - FinalAddress + Addend);
    if (SignExtend64<32>(delta) != delta)
  case ELF::R_PPC64_REL64: {
  case ELF::R_PPC64_ADDR64:
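// In the REL24 case the SignExtend64<26> comparison is the range check for
// the 26-bit branch displacement, and the mask pair preserves the existing
// opcode plus the AA/LK bits (Inst & 0xFC000003) while inserting the
// displacement into bits 2-25 (delta & 0x03FFFFFC) of the I-form branch
// instruction.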
void RuntimeDyldELF::resolveSystemZRelocation(const SectionEntry &Section,

  uint8_t *LocalAddress = Section.getAddressWithOffset(Offset);

  case ELF::R_390_PC16DBL:
  case ELF::R_390_PLT16DBL: {
    assert(int16_t(Delta / 2) * 2 == Delta && "R_390_PC16DBL overflow");
  case ELF::R_390_PC32DBL:
  case ELF::R_390_PLT32DBL: {
    assert(int32_t(Delta / 2) * 2 == Delta && "R_390_PC32DBL overflow");
  case ELF::R_390_PC16: {
    assert(int16_t(Delta) == Delta && "R_390_PC16 overflow");
  case ELF::R_390_PC32: {
    assert(int32_t(Delta) == Delta && "R_390_PC32 overflow");
  case ELF::R_390_PC64: {
    *LocalAddress = (uint8_t)(Value + Addend);
void RuntimeDyldELF::resolveBPFRelocation(const SectionEntry &Section,

  case ELF::R_BPF_NONE:
  case ELF::R_BPF_64_64:
  case ELF::R_BPF_64_32:
  case ELF::R_BPF_64_NODYLD32:
  case ELF::R_BPF_64_ABS64: {
  case ELF::R_BPF_64_ABS32: {
void RuntimeDyldELF::resolveRelocation(const SectionEntry &Section,
                                       uint64_t SymOffset, SID SectionID) {

    resolveX86_64Relocation(Section, Offset, Value, Type, Addend, SymOffset);
void *RuntimeDyldELF::computePlaceholderAddress(unsigned SectionID,
                                                uint64_t Offset) const {

  if (Value.SymbolName)

                                                 bool IsLocal) const {
  case ELF::R_MICROMIPS_GOT16:
      return ELF::R_MICROMIPS_LO16;
  case ELF::R_MICROMIPS_HI16:
    return ELF::R_MICROMIPS_LO16;
  case ELF::R_MIPS_GOT16:
      return ELF::R_MIPS_LO16;
  case ELF::R_MIPS_HI16:
    return ELF::R_MIPS_LO16;
  case ELF::R_MIPS_PCHI16:
    return ELF::R_MIPS_PCLO16;
  return ELF::R_MIPS_NONE;
bool RuntimeDyldELF::resolveAArch64ShortBranch(

  unsigned TargetSectionID;
  if (Value.SymbolName) {
    const auto &SymInfo = Loc->second;
    TargetSectionID = SymInfo.getSectionID();
    TargetOffset = SymInfo.getOffset();
    TargetSectionID = Value.SectionID;

  if (TargetSectionID != SectionID)

  uint64_t SourceOffset = RelI->getOffset();
  if (!isInt<28>(TargetOffset + Value.Addend - SourceOffset))

  if (Value.SymbolName)
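// An AArch64 B/BL encodes a signed 26-bit word displacement, i.e. a +/-128MiB
// byte range, which is exactly the isInt<28> check above.  If the target is
// in the same section and within range, the branch is resolved directly and
// resolveAArch64Branch below can skip emitting a far-call stub.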
void RuntimeDyldELF::resolveAArch64Branch(unsigned SectionID,

  LLVM_DEBUG(dbgs() << "\t\tThis is an AArch64 branch relocation.");

  unsigned RelType = RelI->getType();
  StubMap::const_iterator i = Stubs.find(Value);
  if (i != Stubs.end()) {
    resolveRelocation(Section, Offset,
                      Section.getLoadAddressWithOffset(i->second), RelType, 0);
  } else if (!resolveAArch64ShortBranch(SectionID, RelI, Value)) {
                             ELF::R_AARCH64_MOVW_UABS_G3, Value.Addend);
                             StubTargetAddr - Section.getAddress() + 4,
                             ELF::R_AARCH64_MOVW_UABS_G2_NC, Value.Addend);
                             StubTargetAddr - Section.getAddress() + 8,
                             ELF::R_AARCH64_MOVW_UABS_G1_NC, Value.Addend);
                             StubTargetAddr - Section.getAddress() + 12,
                             ELF::R_AARCH64_MOVW_UABS_G0_NC, Value.Addend);
    if (Value.SymbolName) {
    resolveRelocation(Section, Offset,
    Section.advanceStubOffset(getMaxStubSize());
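// Far branches are routed through a stub emitted by createStubFunction: a
// MOVZ/MOVK sequence that materializes the 64-bit target followed by an
// indirect branch.  The four relocations recorded at stub offsets 0, 4, 8 and
// 12 fill in the address quarters (G3 down to G0_NC), and the original
// CALL26/JUMP26 site is then resolved against the stub's load address.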
  const auto &Obj = cast<ELFObjectFileBase>(O);
  uint64_t RelType = RelI->getType();
    Addend = *AddendOrErr;

  if (auto TargetNameOrErr = Symbol->getName())
    TargetName = *TargetNameOrErr;
    return TargetNameOrErr.takeError();

  LLVM_DEBUG(dbgs() << "\t\tRelType: " << RelType << " Addend: " << Addend
                    << " TargetName: " << TargetName << "\n");

    if (!SymTypeOrErr) {
    SymType = *SymTypeOrErr;
    const auto &SymInfo = gsi->second;
      auto SectionOrErr = Symbol->getSection();
      if (!SectionOrErr) {
        bool isCode = si->isText();
          Value.SectionID = *SectionIDOrErr;
          return SectionIDOrErr.takeError();
        Value.Addend = Addend;
      Value.Addend = Addend;

  if (!Value.SymbolName)
    Value.SymbolName = "";
    if ((RelType == ELF::R_AARCH64_CALL26 ||
         RelType == ELF::R_AARCH64_JUMP26) &&
      resolveAArch64Branch(SectionID, Value, RelI, Stubs);
    } else if (RelType == ELF::R_AARCH64_ADR_GOT_PAGE) {
      uint64_t GOTOffset = findOrAllocGOTEntry(Value, ELF::R_AARCH64_ABS64);
      resolveGOTOffsetRelocation(SectionID, Offset, GOTOffset + Addend,
                                 ELF::R_AARCH64_ADR_PREL_PG_HI21);
    } else if (RelType == ELF::R_AARCH64_LD64_GOT_LO12_NC) {
      uint64_t GOTOffset = findOrAllocGOTEntry(Value, ELF::R_AARCH64_ABS64);
      resolveGOTOffsetRelocation(SectionID, Offset, GOTOffset + Addend,
                                 ELF::R_AARCH64_LDST64_ABS_LO12_NC);
      processSimpleRelocation(SectionID, Offset, RelType, Value);
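// For the two GOT-forming AArch64 relocations, the GOT slot itself receives
// an R_AARCH64_ABS64 entry for the symbol, while the ADRP/LDR pair at the
// original site is re-targeted at the slot: the page-high bits via
// ADR_PREL_PG_HI21 and the low 12 bits via LDST64_ABS_LO12_NC.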
    if (RelType == ELF::R_ARM_PC24 || RelType == ELF::R_ARM_CALL ||
        RelType == ELF::R_ARM_JUMP24) {
      StubMap::const_iterator i = Stubs.find(Value);
      if (i != Stubs.end()) {
        resolveRelocation(Section, Offset,
                          Section.getLoadAddressWithOffset(i->second), RelType,
        Stubs[Value] = Section.getStubOffset();
            Section.getAddressWithOffset(Section.getStubOffset()));
                           ELF::R_ARM_ABS32, Value.Addend);
        if (Value.SymbolName)
            Section.getLoadAddressWithOffset(Section.getStubOffset()), RelType,
        Section.advanceStubOffset(getMaxStubSize());

        reinterpret_cast<uint32_t *>(
            computePlaceholderAddress(SectionID, Offset));
      if (RelType == ELF::R_ARM_PREL31 || RelType == ELF::R_ARM_TARGET1 ||
          RelType == ELF::R_ARM_ABS32) {
        Value.Addend += *Placeholder;
      } else if (RelType == ELF::R_ARM_MOVW_ABS_NC ||
                 RelType == ELF::R_ARM_MOVT_ABS) {
        Value.Addend += (int16_t)((*Placeholder & 0xFFF) |
                                  (((*Placeholder >> 16) & 0xF) << 12));
      processSimpleRelocation(SectionID, Offset, RelType, Value);

    uint8_t *Placeholder = reinterpret_cast<uint8_t *>(
        computePlaceholderAddress(SectionID, Offset));
    if (RelType == ELF::R_MIPS_26) {
      uint32_t Addend = (Opcode & 0x03ffffff) << 2;
      Value.Addend += Addend;

      StubMap::const_iterator i = Stubs.find(Value);
      if (i != Stubs.end()) {
        Stubs[Value] = Section.getStubOffset();
            Section.getAddressWithOffset(Section.getStubOffset()), AbiVariant);
        RelocationEntry REHi(SectionID, StubTargetAddr - Section.getAddress(),
                             ELF::R_MIPS_HI16, Value.Addend);
                             StubTargetAddr - Section.getAddress() + 4,
                             ELF::R_MIPS_LO16, Value.Addend);
        if (Value.SymbolName) {
        Section.advanceStubOffset(getMaxStubSize());
    } else if (RelType == ELF::R_MIPS_HI16 || RelType == ELF::R_MIPS_PCHI16) {
      int64_t Addend = (Opcode & 0x0000ffff) << 16;
      PendingRelocs.push_back(std::make_pair(Value, RE));
    } else if (RelType == ELF::R_MIPS_LO16 || RelType == ELF::R_MIPS_PCLO16) {
      int64_t Addend = Value.Addend + SignExtend32<16>(Opcode & 0x0000ffff);
      for (auto I = PendingRelocs.begin(); I != PendingRelocs.end();) {
        if (MatchingValue == Value &&
            RelType == getMatchingLoRelocation(Reloc.RelType) &&
          if (Value.SymbolName)
          I = PendingRelocs.erase(I);
      if (Value.SymbolName)
      if (RelType == ELF::R_MIPS_32)
        Value.Addend += Opcode;
      else if (RelType == ELF::R_MIPS_PC16)
        Value.Addend += SignExtend32<18>((Opcode & 0x0000ffff) << 2);
      else if (RelType == ELF::R_MIPS_PC19_S2)
        Value.Addend += SignExtend32<21>((Opcode & 0x0007ffff) << 2);
      else if (RelType == ELF::R_MIPS_PC21_S2)
        Value.Addend += SignExtend32<23>((Opcode & 0x001fffff) << 2);
      else if (RelType == ELF::R_MIPS_PC26_S2)
        Value.Addend += SignExtend32<28>((Opcode & 0x03ffffff) << 2);
      processSimpleRelocation(SectionID, Offset, RelType, Value);
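// Because the MIPS %hi/%lo pair has to account for the carry produced when
// the sign-extended %lo half is added, a HI16/PCHI16 relocation cannot be
// finished on its own: it is parked in PendingRelocs and only resolved once
// the matching LO16/PCLO16 (per getMatchingLoRelocation) arrives with the low
// half of the addend.  finalizeLoad reports an error if any entry is left
// unmatched.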
    if (r_type == ELF::R_MIPS_CALL16 || r_type == ELF::R_MIPS_GOT_PAGE ||
        r_type == ELF::R_MIPS_GOT_DISP) {
      if (i != GOTSymbolOffsets.end())
        GOTSymbolOffsets[TargetName] = RE.SymOffset;
      if (Value.SymbolName)
    } else if (RelType == ELF::R_MIPS_26) {
      StubMap::const_iterator i = Stubs.find(Value);
      if (i != Stubs.end()) {
        Stubs[Value] = Section.getStubOffset();
            Section.getAddressWithOffset(Section.getStubOffset()), AbiVariant);
          RelocationEntry REHi(SectionID, StubTargetAddr - Section.getAddress(),
                               ELF::R_MIPS_HI16, Value.Addend);
                               StubTargetAddr - Section.getAddress() + 4,
                               ELF::R_MIPS_LO16, Value.Addend);
          if (Value.SymbolName) {
                                 StubTargetAddr - Section.getAddress(),
                                 ELF::R_MIPS_HIGHEST, Value.Addend);
                                 StubTargetAddr - Section.getAddress() + 4,
                                 ELF::R_MIPS_HIGHER, Value.Addend);
                                 StubTargetAddr - Section.getAddress() + 12,
                                 ELF::R_MIPS_HI16, Value.Addend);
                                 StubTargetAddr - Section.getAddress() + 20,
                                 ELF::R_MIPS_LO16, Value.Addend);
          if (Value.SymbolName) {
        Section.advanceStubOffset(getMaxStubSize());
      processSimpleRelocation(SectionID, Offset, RelType, Value);
    if (RelType == ELF::R_PPC64_REL24) {
      uint8_t *Target = Section.getAddressWithOffset(Offset);
      bool RangeOverflow = false;
      if (AbiVariant != 2) {
          if (auto Err = findOPDEntrySection(Obj, ObjSectionToID, Value))
            return std::move(Err);
        if (Value.SectionID == SectionID) {
          uint8_t SymOther = Symbol->getOther();
          uint8_t *RelocTarget =
          int64_t delta = static_cast<int64_t>(Target - RelocTarget);
          if (SignExtend64<26>(delta) != delta) {
            RangeOverflow = true;
          } else if ((AbiVariant != 2) ||
                     (AbiVariant == 2 && Value.SectionID == SectionID)) {
      if (IsExtern || (AbiVariant == 2 && Value.SectionID != SectionID) ||
        StubMap::const_iterator i = Stubs.find(Value);
        if (i != Stubs.end()) {
          resolveRelocation(Section, Offset,
                            Section.getLoadAddressWithOffset(i->second),
          Stubs[Value] = Section.getStubOffset();
              Section.getAddressWithOffset(Section.getStubOffset()),
                               ELF::R_PPC64_ADDR64, Value.Addend);
          uint64_t StubRelocOffset = StubTargetAddr - Section.getAddress();
            StubRelocOffset += 2;
                               ELF::R_PPC64_ADDR16_HIGHEST, Value.Addend);
                               ELF::R_PPC64_ADDR16_HIGHER, Value.Addend);
                               ELF::R_PPC64_ADDR16_HI, Value.Addend);
                               ELF::R_PPC64_ADDR16_LO, Value.Addend);
          if (Value.SymbolName) {
              Section.getLoadAddressWithOffset(Section.getStubOffset()),
          Section.advanceStubOffset(getMaxStubSize());
        if (IsExtern || (AbiVariant == 2 && Value.SectionID != SectionID)) {
          if (AbiVariant == 2)
    } else if (RelType == ELF::R_PPC64_TOC16 ||
               RelType == ELF::R_PPC64_TOC16_DS ||
               RelType == ELF::R_PPC64_TOC16_LO ||
               RelType == ELF::R_PPC64_TOC16_LO_DS ||
               RelType == ELF::R_PPC64_TOC16_HI ||
               RelType == ELF::R_PPC64_TOC16_HA) {
      case ELF::R_PPC64_TOC16:       RelType = ELF::R_PPC64_ADDR16;       break;
      case ELF::R_PPC64_TOC16_DS:    RelType = ELF::R_PPC64_ADDR16_DS;    break;
      case ELF::R_PPC64_TOC16_LO:    RelType = ELF::R_PPC64_ADDR16_LO;    break;
      case ELF::R_PPC64_TOC16_LO_DS: RelType = ELF::R_PPC64_ADDR16_LO_DS; break;
      case ELF::R_PPC64_TOC16_HI:    RelType = ELF::R_PPC64_ADDR16_HI;    break;
      case ELF::R_PPC64_TOC16_HA:    RelType = ELF::R_PPC64_ADDR16_HA;    break;
      if (auto Err = findPPC64TOCSection(Obj, ObjSectionToID, TOCValue))
        return std::move(Err);

      if (RelType == ELF::R_PPC64_TOC) {
        RelType = ELF::R_PPC64_ADDR64;
        if (auto Err = findPPC64TOCSection(Obj, ObjSectionToID, Value))
          return std::move(Err);
      } else if (TargetName == ".TOC.") {
        if (auto Err = findPPC64TOCSection(Obj, ObjSectionToID, Value))
          return std::move(Err);
        Value.Addend += Addend;
      if (Value.SymbolName)
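// PPC64 notes: under ELFv1 (AbiVariant != 2) a function symbol points at an
// .opd function descriptor, so findOPDEntrySection chases the descriptor to
// the code address, while ELFv2 encodes a local entry point in st_other (see
// decodePPC64LocalEntryOffset).  R_PPC64_REL24 calls whose 26-bit
// displacement overflows, or that leave the section or target an external
// symbol, go through a stub built from the ADDR16_HIGHEST/HIGHER/HI/LO
// entries recorded above.  The TOC16* relocations are rewritten as the
// corresponding ADDR16* forms against the TOC base located by
// findPPC64TOCSection.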
        (RelType == ELF::R_390_PLT32DBL || RelType == ELF::R_390_GOTENT)) {
    LLVM_DEBUG(dbgs() << "\t\tThis is a SystemZ indirect relocation.");
    StubMap::const_iterator i = Stubs.find(Value);
    uintptr_t StubAddress;
    if (i != Stubs.end()) {
      StubAddress = uintptr_t(Section.getAddressWithOffset(i->second));
      uintptr_t BaseAddress = uintptr_t(Section.getAddress());
          alignTo(BaseAddress + Section.getStubOffset(), getStubAlignment());
      unsigned StubOffset = StubAddress - BaseAddress;
      Stubs[Value] = StubOffset;
      if (Value.SymbolName)
      Section.advanceStubOffset(getMaxStubSize());
    if (RelType == ELF::R_390_GOTENT)
      resolveRelocation(Section, Offset, StubAddress + 8, ELF::R_390_PC32DBL,
      resolveRelocation(Section, Offset, StubAddress, RelType, Addend);
    if (RelType == ELF::R_X86_64_PLT32) {
      StubMap::const_iterator i = Stubs.find(Value);
      uintptr_t StubAddress;
      if (i != Stubs.end()) {
        StubAddress = uintptr_t(Section->getAddress()) + i->second;
        uintptr_t BaseAddress = uintptr_t(Section->getAddress());
        StubAddress = alignTo(BaseAddress + Section->getStubOffset(),
                              getStubAlignment());
        unsigned StubOffset = StubAddress - BaseAddress;
        Stubs[Value] = StubOffset;
        Section->advanceStubOffset(getMaxStubSize());

        uint64_t GOTOffset = allocateGOTEntries(1);
        resolveGOTOffsetRelocation(SectionID, StubOffset + 2, GOTOffset - 4,
                                   ELF::R_X86_64_PC32);
            computeGOTOffsetRE(GOTOffset, 0, ELF::R_X86_64_64),
      resolveRelocation(*Section, Offset, StubAddress, ELF::R_X86_64_PC32,
          computePlaceholderAddress(SectionID, Offset));
      processSimpleRelocation(SectionID, Offset, ELF::R_X86_64_PC32, Value);
    } else if (RelType == ELF::R_X86_64_GOTPCREL ||
               RelType == ELF::R_X86_64_GOTPCRELX ||
               RelType == ELF::R_X86_64_REX_GOTPCRELX) {
      uint64_t GOTOffset = allocateGOTEntries(1);
      resolveGOTOffsetRelocation(SectionID, Offset, GOTOffset + Addend,
                                 ELF::R_X86_64_PC32);
          computeGOTOffsetRE(GOTOffset, Value.Offset, ELF::R_X86_64_64);
      if (Value.SymbolName)
    } else if (RelType == ELF::R_X86_64_GOT64) {
      uint64_t GOTOffset = allocateGOTEntries(1);
                        ELF::R_X86_64_64, 0);
          computeGOTOffsetRE(GOTOffset, Value.Offset, ELF::R_X86_64_64);
      if (Value.SymbolName)
    } else if (RelType == ELF::R_X86_64_GOTPC32) {
      (void)allocateGOTEntries(0);
      resolveGOTOffsetRelocation(SectionID, Offset, Addend, ELF::R_X86_64_PC32);
    } else if (RelType == ELF::R_X86_64_GOTPC64) {
      (void)allocateGOTEntries(0);
      resolveGOTOffsetRelocation(SectionID, Offset, Addend, ELF::R_X86_64_PC64);
    } else if (RelType == ELF::R_X86_64_GOTOFF64) {
      (void)allocateGOTEntries(0);
      processSimpleRelocation(SectionID, Offset, RelType, Value);
    } else if (RelType == ELF::R_X86_64_PC32) {
      processSimpleRelocation(SectionID, Offset, RelType, Value);
    } else if (RelType == ELF::R_X86_64_PC64) {
      processSimpleRelocation(SectionID, Offset, RelType, Value);
    } else if (RelType == ELF::R_X86_64_GOTTPOFF) {
      processX86_64GOTTPOFFRelocation(SectionID, Offset, Value, Addend);
    } else if (RelType == ELF::R_X86_64_TLSGD ||
               RelType == ELF::R_X86_64_TLSLD) {
      auto &GetAddrRelocation = *RelI;
      processX86_64TLSRelocation(SectionID, Offset, RelType, Value, Addend,
      processSimpleRelocation(SectionID, Offset, RelType, Value);

    processSimpleRelocation(SectionID, Offset, RelType, Value);
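// Pattern shared by the GOT-based cases above: allocateGOTEntries reserves a
// slot, computeGOTOffsetRE records an R_X86_64_64 (or TPOFF64) entry that
// fills the slot with the symbol's address, and resolveGOTOffsetRelocation
// rewrites the original site to address the slot PC-relatively.  The
// GOTPC32/GOTPC64 cases only need the distance to the GOT base, so they add
// no new entry; (void)allocateGOTEntries(0) merely forces the GOT section to
// exist.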
void RuntimeDyldELF::processX86_64GOTTPOFFRelocation(unsigned SectionID,

  struct CodeSequence {
  std::array<CodeSequence, 2> CodeSequences;

    static const std::initializer_list<uint8_t> ExpectedCodeSequenceList = {
        0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00,
        0x48, 0x03, 0x05, 0x00, 0x00, 0x00, 0x00
    CodeSequences[0].ExpectedCodeSequence =
    CodeSequences[0].TLSSequenceOffset = 12;

    static const std::initializer_list<uint8_t> NewCodeSequenceList = {
        0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00,
        0x48, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00
    CodeSequences[0].TpoffRelocationOffset = 12;

    static const std::initializer_list<uint8_t> ExpectedCodeSequenceList = {
        0x48, 0x8b, 0x05, 0x00, 0x00, 0x00, 0x00,
        0x64, 0x48, 0x8b, 0x00, 0x00, 0x00, 0x00
    CodeSequences[1].ExpectedCodeSequence =
    CodeSequences[1].TLSSequenceOffset = 3;

    static const std::initializer_list<uint8_t> NewCodeSequenceList = {
        0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00,
        0x64, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00,
    CodeSequences[1].TpoffRelocationOffset = 10;

  for (const auto &C : CodeSequences) {
    assert(C.ExpectedCodeSequence.size() == C.NewCodeSequence.size() &&
           "Old and new code sequences must have the same size");
    if (Offset < C.TLSSequenceOffset ||
        (Offset - C.TLSSequenceOffset + C.NewCodeSequence.size()) >
    auto TLSSequenceStartOffset = Offset - C.TLSSequenceOffset;
    auto *TLSSequence = Section.getAddressWithOffset(TLSSequenceStartOffset);
        C.ExpectedCodeSequence) {
    memcpy(TLSSequence, C.NewCodeSequence.data(), C.NewCodeSequence.size());
                       TLSSequenceStartOffset + C.TpoffRelocationOffset,
                       ELF::R_X86_64_TPOFF32, Value.Addend - Addend);
    if (Value.SymbolName)

  uint64_t GOTOffset = allocateGOTEntries(1);
  resolveGOTOffsetRelocation(SectionID, Offset, GOTOffset + Addend,
                             ELF::R_X86_64_PC32);
      computeGOTOffsetRE(GOTOffset, Value.Offset, ELF::R_X86_64_TPOFF64);
  if (Value.SymbolName)
void RuntimeDyldELF::processX86_64TLSRelocation(

  bool IsSmallCodeModel;
  bool IsGOTPCRel = false;

  switch (GetAddrRelocation.getType()) {
  case ELF::R_X86_64_GOTPCREL:
  case ELF::R_X86_64_REX_GOTPCRELX:
  case ELF::R_X86_64_GOTPCRELX:
  case ELF::R_X86_64_PLT32:
    IsSmallCodeModel = true;
  case ELF::R_X86_64_PLTOFF64:
    IsSmallCodeModel = false;
        "invalid TLS relocations for General/Local Dynamic TLS Model: "
        "expected PLT or GOT relocation for __tls_get_addr function");

  if (RelType == ELF::R_X86_64_TLSGD) {
    if (IsSmallCodeModel) {
        static const std::initializer_list<uint8_t> CodeSequence = {
            0x48, 0x8d, 0x3d, 0x00, 0x00,
            0xe8, 0x00, 0x00, 0x00, 0x00
        TLSSequenceOffset = 4;
        static const std::initializer_list<uint8_t> CodeSequence = {
            0x48, 0x8d, 0x3d, 0x00, 0x00,
            0xff, 0x15, 0x00, 0x00, 0x00,
        TLSSequenceOffset = 4;
      static const std::initializer_list<uint8_t> SmallSequence = {
          0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00,
          0x48, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00
      TpoffRelocOffset = 12;
      static const std::initializer_list<uint8_t> CodeSequence = {
          0x48, 0x8d, 0x3d, 0x00, 0x00, 0x00, 0x00,
          0x48, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
      TLSSequenceOffset = 3;
      static const std::initializer_list<uint8_t> LargeSequence = {
          0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00,
          0x48, 0x8d, 0x80, 0x00, 0x00, 0x00, 0x00,
          0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00
      TpoffRelocOffset = 12;
                       ELF::R_X86_64_TPOFF32, Value.Addend - Addend);
    if (Value.SymbolName)
  } else if (RelType == ELF::R_X86_64_TLSLD) {
    if (IsSmallCodeModel) {
        static const std::initializer_list<uint8_t> CodeSequence = {
            0x48, 0x8d, 0x3d, 0x00, 0x00, 0x00,
            0x00, 0xe8, 0x00, 0x00, 0x00, 0x00
        TLSSequenceOffset = 3;
        static const std::initializer_list<uint8_t> SmallSequence = {
            0x64, 0x48, 0x8b, 0x04, 0x25,
            0x00, 0x00, 0x00, 0x00
        static const std::initializer_list<uint8_t> CodeSequence = {
            0x48, 0x8d, 0x3d, 0x00,
            0xff, 0x15, 0x00, 0x00,
        TLSSequenceOffset = 3;
        static const std::initializer_list<uint8_t> SmallSequence = {
            0x0f, 0x1f, 0x40, 0x00,
            0x64, 0x48, 0x8b, 0x04, 0x25,
            0x00, 0x00, 0x00, 0x00
      static const std::initializer_list<uint8_t> CodeSequence = {
          0x48, 0x8d, 0x3d, 0x00, 0x00, 0x00, 0x00,
          0x48, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
      TLSSequenceOffset = 3;
      static const std::initializer_list<uint8_t> LargeSequence = {
          0x66, 0x66, 0x0f, 0x1f, 0x84, 0x00, 0x00, 0x00, 0x00,
          0x64, 0x48, 0x8b, 0x04, 0x25, 0x00, 0x00, 0x00, 0x00
         "Old and new code sequences must have the same size");

  if (Offset < TLSSequenceOffset ||
      (Offset - TLSSequenceOffset + NewCodeSequence.size()) >
  auto *TLSSequence = Section.getAddressWithOffset(Offset - TLSSequenceOffset);
      ExpectedCodeSequence) {
        "invalid TLS sequence for Global/Local Dynamic TLS Model");
  memcpy(TLSSequence, NewCodeSequence.data(), NewCodeSequence.size());
uint64_t RuntimeDyldELF::allocateGOTEntries(unsigned no) {
  if (GOTSectionID == 0) {
  CurrentGOTIndex += no;

                                            unsigned GOTRelType) {
  auto E = GOTOffsetMap.insert({Value, 0});
    uint64_t GOTOffset = allocateGOTEntries(1);
        computeGOTOffsetRE(GOTOffset, Value.Offset, GOTRelType);
    if (Value.SymbolName)
    E.first->second = GOTOffset;
  return E.first->second;
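// GOT entries are only counted here (CurrentGOTIndex); the .got section
// itself is not allocated until finalizeLoad, once the total number of slots
// is known.  findOrAllocGOTEntry uses GOTOffsetMap to hand out a single slot
// per distinct relocation value, so repeated references to the same symbol
// share one entry.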
void RuntimeDyldELF::resolveGOTOffsetRelocation(unsigned SectionID,

  if (ObjSymbolFlags & SymbolRef::SF_Indirect) {
    if (IFuncStubSectionID == 0) {
      IFuncStubSectionID = Sections.size();
          SectionEntry(".text.__llvm_IFuncStubs", nullptr, 0, 0, 0));
      IFuncStubOffset = 64;
    IFuncStubOffset += getMaxIFuncStubSize();
  if (!PendingRelocs.empty())
    return make_error<RuntimeDyldError>("Can't find matching LO16 reloc");

  if (IFuncStubSectionID != 0) {
        IFuncStubOffset, 1, IFuncStubSectionID, ".text.__llvm_IFuncStubs");
    if (!IFuncStubsAddr)
      return make_error<RuntimeDyldError>(
          "Unable to allocate memory for IFunc stubs!");
        SectionEntry(".text.__llvm_IFuncStubs", IFuncStubsAddr, IFuncStubOffset,
                     IFuncStubOffset, 0);

    createIFuncResolver(IFuncStubsAddr);

                      << IFuncStubSectionID << " Addr: "
    for (auto &IFuncStub : IFuncStubs) {
      auto &Symbol = IFuncStub.OriginalSymbol;
                        << " Offset: " << format("%p", Symbol.getOffset())
                        << " IFuncStubOffset: "
                        << format("%p\n", IFuncStub.StubOffset));
      createIFuncStub(IFuncStubSectionID, 0, IFuncStub.StubOffset,
                      Symbol.getSectionID(), Symbol.getOffset());

    IFuncStubSectionID = 0;
    IFuncStubOffset = 0;

  if (GOTSectionID != 0) {
                                              GOTSectionID, ".got", false);
      return make_error<RuntimeDyldError>("Unable to allocate memory for GOT!");
    memset(Addr, 0, TotalSize);

    if (SI->relocation_begin() != SI->relocation_end()) {
        return make_error<RuntimeDyldError>(
      ObjSectionToIDMap::iterator i = SectionMap.find(*RelocatedSection);
      assert(i != SectionMap.end());

  GOTSymbolOffsets.clear();

  ObjSectionToIDMap::iterator i, e;
  for (i = SectionMap.begin(), e = SectionMap.end(); i != e; ++i) {
    if (Name == ".eh_frame") {
      UnregisteredEHFrameSections.push_back(i->second);

  GOTOffsetMap.clear();
  CurrentGOTIndex = 0;
void RuntimeDyldELF::createIFuncResolver(uint8_t *Addr) const {
    const uint8_t StubCode[] = {
        0x41, 0xff, 0x53, 0x08,
    static_assert(sizeof(StubCode) <= 64,
                  "maximum size of the IFunc resolver is 64B");
    memcpy(Addr, StubCode, sizeof(StubCode));
        "IFunc resolver is not supported for target architecture");
void RuntimeDyldELF::createIFuncStub(unsigned IFuncStubSectionID,
                                     unsigned IFuncSectionID,

  auto &IFuncStubSection = Sections[IFuncStubSectionID];
  auto *Addr = IFuncStubSection.getAddressWithOffset(IFuncStubOffset);

    uint64_t GOT1 = allocateGOTEntries(2);
                         IFuncResolverOffset, {});
    RelocationEntry RE2(GOTSectionID, GOT2, ELF::R_X86_64_64, IFuncOffset, {});

    const uint8_t StubCode[] = {
        0x4c, 0x8d, 0x1d, 0x00, 0x00, 0x00, 0x00,
    assert(sizeof(StubCode) <= getMaxIFuncStubSize() &&
           "IFunc stub size must not exceed getMaxIFuncStubSize()");
    memcpy(Addr, StubCode, sizeof(StubCode));

    resolveGOTOffsetRelocation(IFuncStubSectionID, IFuncStubOffset + 3,
                               GOT1 - 4, ELF::R_X86_64_PC32);
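// Each IFunc stub owns a pair of fresh GOT slots (allocateGOTEntries(2)); the
// R_X86_64_64 entries above point one slot at the shared resolver code
// emitted by createIFuncResolver and the other at the ifunc symbol itself.
// The stub begins with "lea disp32(%rip), %r11" (bytes 4c 8d 1d ...), and the
// PC32 fixup at stub offset +3 patches that displacement to address the first
// slot; the shared resolver reaches the ifunc's own resolver function through
// the second slot (the 41 ff 53 08 bytes in createIFuncResolver encode
// call *0x8(%r11)).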
unsigned RuntimeDyldELF::getMaxIFuncStubSize() const {

bool RuntimeDyldELF::relocationNeedsGot(const RelocationRef &R) const {
  unsigned RelTy = R.getType();
    return RelTy == ELF::R_AARCH64_ADR_GOT_PAGE ||
           RelTy == ELF::R_AARCH64_LD64_GOT_LO12_NC;

    return RelTy == ELF::R_X86_64_GOTPCREL ||
           RelTy == ELF::R_X86_64_GOTPCRELX ||
           RelTy == ELF::R_X86_64_GOT64 ||
           RelTy == ELF::R_X86_64_REX_GOTPCRELX;

bool RuntimeDyldELF::relocationNeedsStub(const RelocationRef &R) const {
  switch (R.getType()) {
  case ELF::R_X86_64_GOTPCREL:
  case ELF::R_X86_64_GOTPCRELX:
  case ELF::R_X86_64_REX_GOTPCRELX:
  case ELF::R_X86_64_GOTPC64:
  case ELF::R_X86_64_GOT64:
  case ELF::R_X86_64_GOTOFF64:
  case ELF::R_X86_64_PC32:
  case ELF::R_X86_64_PC64:
  case ELF::R_X86_64_64: