#include <system_error>

using namespace lowertypetests;

#define DEBUG_TYPE "lowertypetests"

STATISTIC(ByteArraySizeBits, "Byte array size in bits");
STATISTIC(ByteArraySizeBytes, "Byte array size in bytes");
STATISTIC(NumByteArraysCreated, "Number of byte arrays created");
STATISTIC(NumTypeTestCallsLowered, "Number of type test calls lowered");
STATISTIC(NumTypeIdDisjointSets, "Number of disjoint sets of type identifiers");
97 "lowertypetests-avoid-reuse",
98 cl::desc(
"Try to avoid reuse of byte array addresses using aliases"),
102 "lowertypetests-summary-action",
103 cl::desc(
"What to do with the summary when running this pass"),
105 clEnumValN(PassSummaryAction::Import,
"import",
106 "Import typeid resolutions from summary and globals"),
107 clEnumValN(PassSummaryAction::Export,
"export",
108 "Export typeid resolutions to summary and globals")),
112 "lowertypetests-read-summary",
113 cl::desc(
"Read summary from given YAML file before running pass"),
117 "lowertypetests-write-summary",
118 cl::desc(
"Write summary to given YAML file after running pass"),
123 cl::desc(
"Simply drop type test assume sequences"),
  return Bits.count(BitOffset);

  std::vector<uint64_t> &Fragment = Fragments.back();
  for (auto ObjIndex : F) {
    if (OldFragmentIndex == 0) {
      Fragment.push_back(ObjIndex);
    } else {
      std::vector<uint64_t> &OldFragment = Fragments[OldFragmentIndex];
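// GlobalLayoutBuilder::addFragment (fragments above): when an object index has
// already been placed in an earlier fragment, that whole fragment is merged
// into the one currently being built, so members of a single type identifier
// stay contiguous in the final layout (see the Fragments and FragmentMap
// members of GlobalLayoutBuilder).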
219 uint8_t &AllocMask) {
229 unsigned ReqSize = AllocByteOffset + BitSize;
231 if (
Bytes.size() < ReqSize)
232 Bytes.resize(ReqSize);
235 AllocMask = 1 << Bit;
237 Bytes[AllocByteOffset +
B] |= AllocMask;
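// ByteArrayBuilder packs up to eight bit sets into a single byte array: each
// bit set is assigned one bit position per byte (AllocMask selects that bit),
// so the bytes of different bit sets overlap. BitAllocs tracks how many bytes
// each of the eight bit positions has consumed so far, letting new
// allocations be steered to the least-used position.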
static bool isJumpTableCanonical(Function *F) {
  if (F->isDeclarationForLinker())
    return false;
  auto *CI = mdconst::extract_or_null<ConstantInt>(
      F->getParent()->getModuleFlag("CFI Canonical Jump Tables"));
  if (!CI || !CI->isZero())
    return true;
  return F->hasFnAttribute("cfi-canonical-jump-table");
}

struct ByteArrayInfo {
  std::set<uint64_t> Bits;
  uint64_t BitSize;
  GlobalVariable *ByteArray;
  GlobalVariable *MaskGlobal;
  uint8_t *MaskPtr = nullptr;
};
class GlobalTypeMember final : TrailingObjects<GlobalTypeMember, MDNode *> {
  GlobalObject *GO;
  size_t NTypes;
  bool IsJumpTableCanonical;
  bool IsExported;

  size_t numTrailingObjects(OverloadToken<MDNode *>) const { return NTypes; }

public:
  static GlobalTypeMember *create(BumpPtrAllocator &Alloc, GlobalObject *GO,
                                  bool IsJumpTableCanonical, bool IsExported,
                                  ArrayRef<MDNode *> Types) {
    auto *GTM = static_cast<GlobalTypeMember *>(Alloc.Allocate(
        totalSizeToAlloc<MDNode *>(Types.size()), alignof(GlobalTypeMember)));
    GTM->GO = GO;
    GTM->NTypes = Types.size();
    GTM->IsJumpTableCanonical = IsJumpTableCanonical;
    GTM->IsExported = IsExported;
    std::uninitialized_copy(Types.begin(), Types.end(),
                            GTM->getTrailingObjects<MDNode *>());
    return GTM;
  }

  GlobalObject *getGlobal() const { return GO; }
  bool isJumpTableCanonical() const { return IsJumpTableCanonical; }
  bool isExported() const { return IsExported; }
  ArrayRef<MDNode *> types() const {
    return ArrayRef(getTrailingObjects<MDNode *>(), NTypes);
  }
};

struct ICallBranchFunnel final
    : TrailingObjects<ICallBranchFunnel, GlobalTypeMember *> {
  static ICallBranchFunnel *create(BumpPtrAllocator &Alloc, CallInst *CI,
                                   ArrayRef<GlobalTypeMember *> Targets,
                                   unsigned UniqueId) {
    auto *Call = static_cast<ICallBranchFunnel *>(
        Alloc.Allocate(totalSizeToAlloc<GlobalTypeMember *>(Targets.size()),
                       alignof(ICallBranchFunnel)));
    Call->CI = CI;
    Call->UniqueId = UniqueId;
    Call->NTargets = Targets.size();
    std::uninitialized_copy(Targets.begin(), Targets.end(),
                            Call->getTrailingObjects<GlobalTypeMember *>());
    return Call;
  }

  CallInst *CI;
  unsigned UniqueId;
  size_t NTargets;

  ArrayRef<GlobalTypeMember *> targets() const {
    return ArrayRef(getTrailingObjects<GlobalTypeMember *>(), NTargets);
  }
};
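// ICallBranchFunnel records one call to llvm.icall.branch.funnel together with
// the GlobalTypeMembers it may dispatch to. Keeping the targets attached to
// the call lets buildBitSetsFromDisjointSet (further down) put them in the
// same disjoint set and lay them out contiguously in the jump table.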
struct ScopedSaveAliaseesAndUsed {
  std::vector<std::pair<GlobalAlias *, Function *>> FunctionAliases;
  std::vector<std::pair<GlobalIFunc *, Function *>> ResolverIFuncs;

  ScopedSaveAliaseesAndUsed(Module &M) : M(M) {
    GV->eraseFromParent();
    GV->eraseFromParent();

    for (auto &GA : M.aliases()) {
      if (auto *F = dyn_cast<Function>(GA.getAliasee()->stripPointerCasts()))
        FunctionAliases.push_back({&GA, F});
    }

    for (auto &GI : M.ifuncs())
      if (auto *F = dyn_cast<Function>(GI.getResolver()->stripPointerCasts()))
        ResolverIFuncs.push_back({&GI, F});
  }

  ~ScopedSaveAliaseesAndUsed() {
    for (auto P : FunctionAliases)
      P.first->setAliasee(P.second);

    for (auto P : ResolverIFuncs) {
      P.first->setResolver(P.second);
    }
  }
};
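// Note: the name suggests this helper also snapshots the llvm.used /
// llvm.compiler.used lists (the two eraseFromParent calls in the constructor);
// what is certain from the code shown is that the destructor restores every
// saved alias's aliasee and every ifunc's resolver after the pass has
// temporarily redirected them.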
class LowerTypeTestsModule {
  bool CanUseArmJumpTable = false, CanUseThumbBWJumpTable = false;
  int HasBranchTargetEnforcement = -1;

  PointerType *Int8PtrTy = PointerType::getUnqual(M.getContext());
  PointerType *Int32PtrTy = PointerType::getUnqual(M.getContext());
  IntegerType *IntPtrTy = M.getDataLayout().getIntPtrType(M.getContext(), 0);

  struct TypeIdUserInfo {
    std::vector<CallInst *> CallSites;
    bool IsExported = false;
  };

  struct TypeIdLowering {
    TypeTestResolution::Kind TheKind = TypeTestResolution::Unsat;
    Constant *OffsetedGlobal;
    Constant *AlignLog2;
    Constant *SizeM1;
    Constant *TheByteArray;
    Constant *BitMask;
    Constant *InlineBits;
  };

  std::vector<ByteArrayInfo> ByteArrayInfos;

  Function *WeakInitializerFn = nullptr;

  bool shouldExportConstantsAsAbsoluteSymbols();
  uint8_t *exportTypeId(StringRef TypeId, const TypeIdLowering &TIL);
  TypeIdLowering importTypeId(StringRef TypeId);
  void importFunction(Function *F, bool isJumpTableCanonical,
                      std::vector<GlobalAlias *> &AliasesToErase);
  ByteArrayInfo *createByteArray(BitSetInfo &BSI);
  void allocateByteArrays();
  void lowerTypeTestCalls(
      ArrayRef<Metadata *> TypeIds, Constant *CombinedGlobalAddr,
      const DenseMap<GlobalTypeMember *, uint64_t> &GlobalLayout);
  Value *lowerTypeTestCall(Metadata *TypeId, CallInst *CI,
                           const TypeIdLowering &TIL);
  bool hasBranchTargetEnforcement();
  unsigned getJumpTableEntrySize();
  Type *getJumpTableEntryType();
  void replaceWeakDeclarationWithJumpTablePtr(Function *F, Constant *JT,
                                              bool IsJumpTableCanonical);
  void findGlobalVariableUsersOf(Constant *C,
                                 SmallSetVector<GlobalVariable *, 8> &Out);
  void replaceCfiUses(Function *Old, Value *New, bool IsJumpTableCanonical);
  void replaceDirectCalls(Value *Old, Value *New);
  for (const auto &GlobalAndOffset : GlobalLayout) {
    for (MDNode *Type : GlobalAndOffset.first->types()) {
      if (Type->getOperand(1) != TypeId)
        continue;
      cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())

  auto BitsType = cast<IntegerType>(Bits->getType());
  unsigned BitWidth = BitsType->getBitWidth();

  BitOffset = B.CreateZExtOrTrunc(BitOffset, BitsType);
  Value *MaskedBits = B.CreateAnd(Bits, BitMask);
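// createMaskedBitTest (above) builds the check "bit (BitOffset mod BitWidth)
// of Bits is set", i.e. roughly:
//   MaskedBits = Bits & (1 << (BitOffset % BitWidth));
//   return MaskedBits != 0;
// which is how small "inline" bit vectors are tested without a byte array.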
ByteArrayInfo *LowerTypeTestsModule::createByteArray(BitSetInfo &BSI) {
  ByteArrayInfos.emplace_back();
  ByteArrayInfo *BAI = &ByteArrayInfos.back();

  BAI->Bits = BSI.Bits;
  BAI->ByteArray = ByteArrayGlobal;
  BAI->MaskGlobal = MaskGlobal;

void LowerTypeTestsModule::allocateByteArrays() {
                    [](const ByteArrayInfo &BAI1, const ByteArrayInfo &BAI2) {
                      return BAI1.BitSize > BAI2.BitSize;

  std::vector<uint64_t> ByteArrayOffsets(ByteArrayInfos.size());

  for (unsigned I = 0; I != ByteArrayInfos.size(); ++I) {
    ByteArrayInfo *BAI = &ByteArrayInfos[I];
    BAB.allocate(BAI->Bits, BAI->BitSize, ByteArrayOffsets[I], Mask);

    BAI->MaskGlobal->replaceAllUsesWith(
    BAI->MaskGlobal->eraseFromParent();
    *BAI->MaskPtr = Mask;

  for (unsigned I = 0; I != ByteArrayInfos.size(); ++I) {
    ByteArrayInfo *BAI = &ByteArrayInfos[I];
        ByteArrayConst->getType(), ByteArray, Idxs);

    BAI->ByteArray->replaceAllUsesWith(Alias);
    BAI->ByteArray->eraseFromParent();

  ByteArraySizeBytes = BAB.Bytes.size();
Value *LowerTypeTestsModule::createBitSetTest(IRBuilder<> &B,
                                              const TypeIdLowering &TIL,
                                              Value *BitOffset) {
  Constant *ByteArray = TIL.TheByteArray;
                       "bits_use", ByteArray, &M);

  Value *ByteAddr = B.CreateGEP(Int8Ty, ByteArray, BitOffset);

  if (auto GV = dyn_cast<GlobalObject>(V)) {
    GV->getMetadata(LLVMContext::MD_type, Types);
      if (Type->getOperand(1) != TypeId)
        cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())

  if (auto GEP = dyn_cast<GEPOperator>(V)) {
    APInt APOffset(DL.getIndexSizeInBits(0), 0);
    bool Result = GEP->accumulateConstantOffset(DL, APOffset);

  if (auto Op = dyn_cast<Operator>(V)) {
    if (Op->getOpcode() == Instruction::BitCast)
    if (Op->getOpcode() == Instruction::Select)
Value *LowerTypeTestsModule::lowerTypeTestCall(Metadata *TypeId, CallInst *CI,
                                               const TypeIdLowering &TIL) {
  Value *PtrAsInt = B.CreatePtrToInt(Ptr, IntPtrTy);

    return B.CreateICmpEQ(PtrAsInt, OffsetedGlobalAsInt);

  Value *PtrOffset = B.CreateSub(PtrAsInt, OffsetedGlobalAsInt);

  Value *OffsetSHR =
      B.CreateLShr(PtrOffset, B.CreateZExt(TIL.AlignLog2, IntPtrTy));
  Value *OffsetSHL = B.CreateShl(
      PtrOffset, B.CreateZExt(

  Value *BitOffset = B.CreateOr(OffsetSHR, OffsetSHL);

  Value *OffsetInRange = B.CreateICmpULE(BitOffset, TIL.SizeM1);

  return OffsetInRange;
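// The lshr/shl pair above implements a rotate: BitOffset is (Ptr - Global)
// rotated right by AlignLog2 bits. A single unsigned comparison against
// SizeM1 then checks both that the pointer is suitably aligned (any
// misalignment sets high bits) and that it lies within the range covered by
// the type's bit set.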
  if (auto *Br = dyn_cast<BranchInst>(*CI->user_begin()))
                         Br->getMetadata(LLVMContext::MD_prof));

    for (auto &Phi : Else->phis())
      Phi.addIncoming(Phi.getIncomingValueForBlock(Then), InitialBB);

    return createBitSetTest(ThenB, TIL, BitOffset);

  Value *Bit = createBitSetTest(ThenB, TIL, BitOffset);

  B.SetInsertPoint(CI);
  P->addIncoming(Bit, ThenB.GetInsertBlock());

void LowerTypeTestsModule::buildBitSetsFromGlobalVariables(
    ArrayRef<Metadata *> TypeIds, ArrayRef<GlobalTypeMember *> Globals) {
  std::vector<Constant *> GlobalInits;

  for (GlobalTypeMember *G : Globals) {
    auto *GV = cast<GlobalVariable>(G->getGlobal());
    Align Alignment =
        DL.getValueOrABITypeAlignment(GV->getAlign(), GV->getValueType());
    MaxAlign = std::max(MaxAlign, Alignment);
    GlobalLayout[G] = GVOffset;

    GlobalInits.push_back(
    GlobalInits.push_back(GV->getInitializer());
    uint64_t InitSize = DL.getTypeAllocSize(GV->getValueType());
    CurOffset = GVOffset + InitSize;

    if (DesiredPadding > 32)
      DesiredPadding = alignTo(InitSize, 32) - InitSize;

  auto *CombinedGlobal =
  CombinedGlobal->setAlignment(MaxAlign);

  lowerTypeTestCalls(TypeIds, CombinedGlobal, GlobalLayout);

  for (unsigned I = 0; I != Globals.size(); ++I) {
        NewInit->getType(), CombinedGlobal, CombinedGlobalIdxs);
        "", CombinedGlobalElemPtr, &M);
bool LowerTypeTestsModule::shouldExportConstantsAsAbsoluteSymbols() {

uint8_t *LowerTypeTestsModule::exportTypeId(StringRef TypeId,
                                            const TypeIdLowering &TIL) {
        "__typeid_" + TypeId + "_" + Name, C, &M);

    if (shouldExportConstantsAsAbsoluteSymbols())
    Storage = cast<ConstantInt>(C)->getZExtValue();

  ExportGlobal("global_addr", TIL.OffsetedGlobal);

    ExportConstant("align", TTRes.AlignLog2, TIL.AlignLog2);
    ExportConstant("size_m1", TTRes.SizeM1, TIL.SizeM1);

    uint64_t BitSize = cast<ConstantInt>(TIL.SizeM1)->getZExtValue() + 1;

    ExportGlobal("byte_array", TIL.TheByteArray);
    if (shouldExportConstantsAsAbsoluteSymbols())
      ExportGlobal("bit_mask", TIL.BitMask);

    ExportConstant("inline_bits", TTRes.InlineBits, TIL.InlineBits);
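// exportTypeId publishes the lowering for a type identifier as specially named
// symbols that the import phase in other modules can look up: for a type id
// <T> it defines __typeid_<T>_global_addr and, depending on the resolution
// kind, __typeid_<T>_align, __typeid_<T>_size_m1, __typeid_<T>_byte_array,
// __typeid_<T>_bit_mask and __typeid_<T>_inline_bits (names taken from the
// ExportGlobal/ExportConstant calls above).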
LowerTypeTestsModule::TypeIdLowering
LowerTypeTestsModule::importTypeId(StringRef TypeId) {
  Constant *C = M.getOrInsertGlobal(("__typeid_" + TypeId + "_" + Name).str(),
  if (auto *GV = dyn_cast<GlobalVariable>(C))

    if (!shouldExportConstantsAsAbsoluteSymbols()) {
      if (!isa<IntegerType>(Ty))

    auto *GV = cast<GlobalVariable>(C->stripPointerCasts());
    if (isa<IntegerType>(Ty))
    if (GV->getMetadata(LLVMContext::MD_absolute_symbol))

      SetAbsRange(~0ull, ~0ull);
      SetAbsRange(0, 1ull << AbsWidth);

  TIL.OffsetedGlobal = ImportGlobal("global_addr");
  TIL.AlignLog2 = ImportConstant("align", TTRes.AlignLog2, 8, Int8Ty);
  TIL.TheByteArray = ImportGlobal("byte_array");
  TIL.BitMask = ImportConstant("bit_mask", TTRes.BitMask, 8, Int8PtrTy);
  TIL.InlineBits = ImportConstant(

void LowerTypeTestsModule::importTypeTest(CallInst *CI) {
  auto TypeIdMDVal = dyn_cast<MetadataAsValue>(CI->getArgOperand(1));
  auto TypeIdStr = dyn_cast<MDString>(TypeIdMDVal->getMetadata());

  TypeIdLowering TIL = importTypeId(TypeIdStr->getString());
  Value *Lowered = lowerTypeTestCall(TypeIdStr, CI, TIL);
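// For reference, a type test in IR looks roughly like
//   %ok = call i1 @llvm.type.test(ptr %p, metadata !"_ZTS1A")
//   call void @llvm.assume(i1 %ok)
// importTypeTest pulls the type-id string out of the metadata operand,
// imports the lowering published by the defining module, and replaces the
// intrinsic call with the expanded check.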
void LowerTypeTestsModule::importFunction(
    Function *F, bool isJumpTableCanonical,
    std::vector<GlobalAlias *> &AliasesToErase) {
  assert(F->getType()->getAddressSpace() == 0);

  std::string Name = std::string(F->getName());

  if (F->isDSOLocal()) {
                       F->getAddressSpace(),
    replaceDirectCalls(F, RealF);

                     F->getAddressSpace(), Name + ".cfi_jt", &M);
    F->setName(Name + ".cfi");
                     F->getAddressSpace(), Name, &M);

  for (auto &U : F->uses()) {
    if (auto *A = dyn_cast<GlobalAlias>(U.getUser())) {
                       F->getAddressSpace(), "", &M);
      A->replaceAllUsesWith(AliasDecl);
      AliasesToErase.push_back(A);

  if (F->hasExternalWeakLinkage())

  F->setVisibility(Visibility);
void LowerTypeTestsModule::lowerTypeTestCalls(
  BitSetInfo BSI = buildBitSet(TypeId, GlobalLayout);

  if (auto MDS = dyn_cast<MDString>(TypeId))
    dbgs() << MDS->getString() << ": ";
  else
    dbgs() << "<unnamed>: ";

  ByteArrayInfo *BAI = nullptr;

  } else if (BSI.BitSize <= 64) {
    for (auto Bit : BSI.Bits)
    if (InlineBits == 0)

    ++NumByteArraysCreated;
    BAI = createByteArray(BSI);
    TIL.TheByteArray = BAI->ByteArray;
    TIL.BitMask = BAI->MaskGlobal;

  TypeIdUserInfo &TIUI = TypeIdUsers[TypeId];

  if (TIUI.IsExported) {
    uint8_t *MaskPtr = exportTypeId(cast<MDString>(TypeId)->getString(), TIL);
    BAI->MaskPtr = MaskPtr;

  for (CallInst *CI : TIUI.CallSites) {
    ++NumTypeTestCallsLowered;
    Value *Lowered = lowerTypeTestCall(TypeId, CI, TIL);
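// The resolution kind chosen here depends on the shape of the bit set: an
// all-ones range needs only the alignment/range check, a set that fits in 64
// bits is encoded inline as an integer constant (the InlineBits path above),
// and anything larger gets a byte array plus bit mask from createByteArray.
// TypeTestResolution::Kind also has Single and Unsat for the degenerate cases.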
  if (Type->getNumOperands() != 2)

  if (isa<GlobalVariable>(GO) && GO->hasSection())
    report_fatal_error(
        "A member of a type identifier may not have an explicit section");

  auto OffsetConstMD = dyn_cast<ConstantAsMetadata>(Type->getOperand(0));
  auto OffsetInt = dyn_cast<ConstantInt>(OffsetConstMD->getValue());
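// verifyTypeMDNode checks the shape of !type metadata. Each node is expected
// to have exactly two operands, e.g.
//   @vt = constant { ... } { ... }, !type !0
//   !0 = !{i64 16, !"_ZTS1A"}
// where operand 0 is the byte offset of the address point within the global
// and operand 1 is the type identifier.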
bool LowerTypeTestsModule::hasBranchTargetEnforcement() {
  if (HasBranchTargetEnforcement == -1) {
    if (const auto *BTE = mdconst::extract_or_null<ConstantInt>(
            M.getModuleFlag("branch-target-enforcement")))
      HasBranchTargetEnforcement = (BTE->getZExtValue() != 0);
    else
      HasBranchTargetEnforcement = 0;
  }
  return HasBranchTargetEnforcement;
}

unsigned LowerTypeTestsModule::getJumpTableEntrySize() {
  switch (JumpTableArch) {
    if (const auto *MD = mdconst::extract_or_null<ConstantInt>(
            M.getModuleFlag("cf-protection-branch")))
      if (MD->getZExtValue())

    if (CanUseThumbBWJumpTable) {
      if (hasBranchTargetEnforcement())

    if (hasBranchTargetEnforcement())
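// Jump table entry sizes are per-architecture constants
// (kX86JumpTableEntrySize, kX86IBTJumpTableEntrySize, kARMJumpTableEntrySize,
// kARMBTIJumpTableEntrySize, kARMv6MJumpTableEntrySize,
// kRISCVJumpTableEntrySize, kLOONGARCH64JumpTableEntrySize); the branches
// above select the IBT/BTI variants when the corresponding module flags are
// set.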
void LowerTypeTestsModule::createJumpTableEntry(
  unsigned ArgIndex = AsmArgs.size();

  if (const auto *MD = mdconst::extract_or_null<ConstantInt>(
    Endbr = !MD->isZero();
      AsmOS << (JumpTableArch == Triple::x86 ? "endbr32\n" : "endbr64\n");
    AsmOS << "jmp ${" << ArgIndex << ":c}@plt\n";
    AsmOS << ".balign 16, 0xcc\n";
    AsmOS << "int3\nint3\nint3\n";

    AsmOS << "b $" << ArgIndex << "\n";

    if (hasBranchTargetEnforcement())
    AsmOS << "b $" << ArgIndex << "\n";

    if (!CanUseThumbBWJumpTable) {
      AsmOS << "push {r0,r1}\n"
            << "0: add r0, r0, pc\n"
            << "str r0, [sp, #4]\n"
            << "1: .word $" << ArgIndex << " - (0b + 4)\n";

    if (hasBranchTargetEnforcement())
    AsmOS << "b.w $" << ArgIndex << "\n";

    AsmOS << "tail $" << ArgIndex << "@plt\n";

    AsmOS << "pcalau12i $$t0, %pc_hi20($" << ArgIndex << ")\n"
          << "jirl $$r0, $$t0, %pc_lo12($" << ArgIndex << ")\n";

  ConstraintOS << (ArgIndex > 0 ? ",s" : "s");
Type *LowerTypeTestsModule::getJumpTableEntryType() {

void LowerTypeTestsModule::buildBitSetsFromFunctions(
    buildBitSetsFromFunctionsNative(TypeIds, Functions);
    buildBitSetsFromFunctionsWASM(TypeIds, Functions);

void LowerTypeTestsModule::moveInitializerToModuleConstructor(
  if (WeakInitializerFn == nullptr) {
        M.getDataLayout().getProgramAddressSpace(),
        "__cfi_global_var_init", &M);

    WeakInitializerFn->setSection(
            ? "__TEXT,__StaticInit,regular,pure_instructions"

  IRBuilder<> IRB(WeakInitializerFn->getEntryBlock().getTerminator());

void LowerTypeTestsModule::findGlobalVariableUsersOf(
  for (auto *U : C->users()) {
    if (auto *GV = dyn_cast<GlobalVariable>(U))
    else if (auto *C2 = dyn_cast<Constant>(U))
      findGlobalVariableUsersOf(C2, Out);
void LowerTypeTestsModule::replaceWeakDeclarationWithJumpTablePtr(
  findGlobalVariableUsersOf(F, GlobalVarUsers);
  for (auto *GV : GlobalVarUsers)
    moveInitializerToModuleConstructor(GV);

                     F->getAddressSpace(), "", &M);
  replaceCfiUses(F, PlaceholderFn, IsJumpTableCanonical);

    auto *InsertPt = dyn_cast<Instruction>(U.getUser());
    assert(InsertPt && "Non-instruction users should have been eliminated");
    auto *PN = dyn_cast<PHINode>(InsertPt);
    if (PN)
      InsertPt = PN->getIncomingBlock(U)->getTerminator();

      PN->setIncomingValueForBlock(InsertPt->getParent(), Select);
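// Weak external functions may turn out to be null at link time, so their
// address checks cannot unconditionally use the jump table entry. The code
// above rewrites such uses to a select of the form
//   F != null ? <jump table entry> : null
// moving constant initializers into __cfi_global_var_init where needed so
// the select can be emitted as an instruction.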
  Attribute TFAttr = F->getFnAttribute("target-features");
    if (Feature == "-thumb-mode")
    else if (Feature == "+thumb-mode")

  if (!CanUseThumbBWJumpTable && CanUseArmJumpTable) {
    unsigned ArmCount = 0, ThumbCount = 0;
    for (const auto GTM : Functions) {
      if (!GTM->isJumpTableCanonical()) {

      Function *F = cast<Function>(GTM->getGlobal());
void LowerTypeTestsModule::createJumpTable(
  std::string AsmStr, ConstraintStr;

  for (GlobalTypeMember *GTM : Functions)
    createJumpTableEntry(AsmOS, ConstraintOS, JumpTableArch, AsmArgs,
                         cast<Function>(GTM->getGlobal()));

  F->setAlignment(Align(getJumpTableEntrySize()));
  F->addFnAttr(Attribute::Naked);
    F->addFnAttr("target-features", "-thumb-mode");
    if (hasBranchTargetEnforcement()) {
      F->addFnAttr("target-features", "+thumb-mode,+pacbti");
      F->addFnAttr("target-features", "+thumb-mode");
    if (CanUseThumbBWJumpTable) {
      F->addFnAttr("target-cpu", "cortex-a8");

  F->addFnAttr("branch-target-enforcement", "false");
  F->addFnAttr("sign-return-address", "none");
    F->addFnAttr("target-features", "-c,-relax");

  F->addFnAttr(Attribute::NoCfCheck);
  F->addFnAttr(Attribute::NoUnwind);

  for (const auto &Arg : AsmArgs)
                  AsmOS.str(), ConstraintOS.str(),

  IRB.CreateCall(JumpTableAsm, AsmArgs);
  IRB.CreateUnreachable();
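// The whole jump table is emitted as a single naked, nounwind function whose
// body is one inline-asm blob: one createJumpTableEntry-generated stub per
// member function, each a fixed getJumpTableEntrySize() bytes, so an entry's
// address can be computed as JumpTable + Index * EntrySize.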
void LowerTypeTestsModule::buildBitSetsFromFunctionsNative(
  JumpTableArch = selectJumpTableArmEncoding(Functions);

  unsigned EntrySize = getJumpTableEntrySize();
  for (unsigned I = 0; I != Functions.size(); ++I)
    GlobalLayout[Functions[I]] = I * EntrySize;

      M.getDataLayout().getProgramAddressSpace(),
      ".cfi.jumptable", &M);

  lowerTypeTestCalls(TypeIds, JumpTable, GlobalLayout);

  ScopedSaveAliaseesAndUsed S(M);

  for (unsigned I = 0; I != Functions.size(); ++I) {
    Function *F = cast<Function>(Functions[I]->getGlobal());
    bool IsJumpTableCanonical = Functions[I]->isJumpTableCanonical();
        JumpTableType, JumpTable,

    const bool IsExported = Functions[I]->isExported();
    if (!IsJumpTableCanonical) {
          F->getName() + ".cfi_jt",
          CombinedGlobalElemPtr, &M);
      if (IsJumpTableCanonical)

    if (!IsJumpTableCanonical) {
      if (F->hasExternalWeakLinkage())
        replaceWeakDeclarationWithJumpTablePtr(F, CombinedGlobalElemPtr,
                                               IsJumpTableCanonical);
      else
        replaceCfiUses(F, CombinedGlobalElemPtr, IsJumpTableCanonical);
    } else {
      assert(F->getType()->getAddressSpace() == 0);
          CombinedGlobalElemPtr, &M);
      F->setName(FAlias->getName() + ".cfi");
      replaceCfiUses(F, FAlias, IsJumpTableCanonical);
      if (!F->hasLocalLinkage())

  createJumpTable(JumpTableFn, Functions);
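// Naming convention used above: for a canonical jump table the original
// function is renamed to <name>.cfi and an alias with the original name points
// at its jump table entry; for non-canonical tables the entry is exported as
// <name>.cfi_jt instead and address-taken uses are redirected to it while
// direct calls keep going to the real function.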
void LowerTypeTestsModule::buildBitSetsFromFunctionsWASM(
  for (GlobalTypeMember *GTM : Functions) {
    Function *F = cast<Function>(GTM->getGlobal());

    if (!F->hasAddressTaken())

    F->setMetadata("wasm.index", MD);
    GlobalLayout[GTM] = IndirectIndex++;

void LowerTypeTestsModule::buildBitSetsFromDisjointSet(
  for (unsigned I = 0; I != TypeIds.size(); ++I)
    TypeIdIndices[TypeIds[I]] = I;

  std::vector<std::set<uint64_t>> TypeMembers(TypeIds.size());
  unsigned GlobalIndex = 0;

  for (GlobalTypeMember *GTM : Globals) {
      auto I = TypeIdIndices.find(Type->getOperand(1));
      if (I != TypeIdIndices.end())
        TypeMembers[I->second].insert(GlobalIndex);
    GlobalIndices[GTM] = GlobalIndex;

  for (ICallBranchFunnel *JT : ICallBranchFunnels) {
    TypeMembers.emplace_back();
    std::set<uint64_t> &TMSet = TypeMembers.back();
    for (GlobalTypeMember *T : JT->targets())
      TMSet.insert(GlobalIndices[T]);

                const std::set<uint64_t> &O2) {
    return O1.size() < O2.size();

  for (auto &&MemSet : TypeMembers)
    GLB.addFragment(MemSet);

      Globals.empty() || isa<GlobalVariable>(Globals[0]->getGlobal());
  std::vector<GlobalTypeMember *> OrderedGTMs(Globals.size());
  auto OGTMI = OrderedGTMs.begin();
  for (auto &&F : GLB.Fragments) {
      if (IsGlobalSet != isa<GlobalVariable>(Globals[Offset]->getGlobal()))
                           "variables and functions");
      *OGTMI++ = Globals[Offset];

  if (IsGlobalSet)
    buildBitSetsFromGlobalVariables(TypeIds, OrderedGTMs);
  else
    buildBitSetsFromFunctions(TypeIds, OrderedGTMs);
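// buildBitSetsFromDisjointSet computes one combined layout per disjoint set:
// every type id (and every branch funnel) contributes the set of member
// indices it covers, the sets are handed to GlobalLayoutBuilder smallest
// first, and the resulting fragment order becomes the order of globals in the
// combined object or of functions in the jump table. A set must consist
// entirely of global variables or entirely of functions, as the check above
// enforces.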
LowerTypeTestsModule::LowerTypeTestsModule(
    : M(M), ExportSummary(ExportSummary), ImportSummary(ImportSummary),
  assert(!(ExportSummary && ImportSummary));

  Triple TargetTriple(M.getTargetTriple());
  Arch = TargetTriple.getArch();
    CanUseArmJumpTable = true;
    CanUseArmJumpTable = true;
    CanUseThumbBWJumpTable = true;
  OS = TargetTriple.getOS();
  ObjectFormat = TargetTriple.getObjectFormat();

  auto ReadSummaryFile =
  yaml::Input In(ReadSummaryFile->getBuffer());

  LowerTypeTestsModule(

  yaml::Output Out(OS);

static bool isDirectCall(Use &U) {
  auto *Usr = dyn_cast<CallInst>(U.getUser());
    auto *CB = dyn_cast<CallBase>(Usr);
    if (CB && CB->isCallee(&U))

void LowerTypeTestsModule::replaceCfiUses(Function *Old, Value *New,
                                          bool IsJumpTableCanonical) {
    if (isa<BlockAddress, NoCFIValue>(U.getUser()))

    if (auto *C = dyn_cast<Constant>(U.getUser())) {
      if (!isa<GlobalValue>(C)) {

  C->handleOperandChange(Old, New);

void LowerTypeTestsModule::replaceDirectCalls(Value *Old, Value *New) {
  auto *CI = cast<CallInst>(U.getUser());

    if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
      Assume->eraseFromParent();

  all_of(CI->users(), [](User *U) -> bool { return isa<PHINode>(U); }));
bool LowerTypeTestsModule::lower() {
  if (DropTypeTests) {
    if (PublicTypeTestFunc)
    if (TypeTestFunc || PublicTypeTestFunc) {

  if ((!TypeTestFunc || TypeTestFunc->use_empty()) &&
      (!ICallBranchFunnelFunc || ICallBranchFunnelFunc->use_empty()) &&
      !ExportSummary && !ImportSummary)

  if (ImportSummary) {
      importTypeTest(cast<CallInst>(U.getUser()));

    if (ICallBranchFunnelFunc && !ICallBranchFunnelFunc->use_empty())
      report_fatal_error(
          "unexpected call to llvm.icall.branch.funnel during import phase");

      if (F.hasLocalLinkage())
          std::string(F.getName())))

    std::vector<GlobalAlias *> AliasesToErase;
    {
      ScopedSaveAliaseesAndUsed S(M);
      for (auto *F : Defs)
        importFunction(F, true, AliasesToErase);
      for (auto *F : Decls)
        importFunction(F, false, AliasesToErase);
    }

  GlobalClassesTy GlobalClasses;

  std::vector<GlobalTypeMember *> RefGlobals;
  unsigned CurUniqueId = 0;

  const bool CrossDsoCfi = M.getModuleFlag("Cross-DSO CFI") != nullptr;
  struct ExportedFunctionInfo {

  if (ExportSummary) {
    for (auto &I : *ExportSummary)
      for (auto &GVS : I.second.SummaryList)
          for (const auto &Ref : GVS->refs())

    NamedMDNode *CfiFunctionsMD = M.getNamedMetadata("cfi.functions");
    if (CfiFunctionsMD) {
      for (auto *FuncMD : CfiFunctionsMD->operands()) {
        assert(FuncMD->getNumOperands() >= 2);
            cast<MDString>(FuncMD->getOperand(0))->getString();
            cast<ConstantAsMetadata>(FuncMD->getOperand(1))
                ->getUniqueInteger()

        if (!ExportSummary->isGUIDLive(GUID))
        if (!AddressTaken.count(GUID)) {
          bool Exported = false;
          if (auto VI = ExportSummary->getValueInfo(GUID))
            for (const auto &GVS : VI.getSummaryList())

        auto P = ExportedFunctions.insert({FunctionName, {Linkage, FuncMD}});
          P.first->second = {Linkage, FuncMD};

    for (const auto &P : ExportedFunctions) {
      MDNode *FuncMD = P.second.FuncMD;
      if (F && F->hasLocalLinkage()) {
        F->setName(F->getName() + ".1");
            GlobalVariable::ExternalLinkage,
            M.getDataLayout().getProgramAddressSpace(), FunctionName, &M);

      if (F->hasAvailableExternallyLinkage()) {
        F->setComdat(nullptr);

      if (F->isDeclaration()) {
        F->eraseMetadata(LLVMContext::MD_type);
        F->addMetadata(LLVMContext::MD_type,

  bool IsJumpTableCanonical = false;
  bool IsExported = false;
  if (Function *F = dyn_cast<Function>(&GO)) {
    if (ExportedFunctions.count(F->getName())) {
      IsJumpTableCanonical |=
    } else if (!F->hasAddressTaken()) {
      if (!CrossDsoCfi || !IsJumpTableCanonical || F->hasLocalLinkage())

  auto *GTM = GlobalTypeMember::create(Alloc, &GO, IsJumpTableCanonical,
  GlobalTypeMembers[&GO] = GTM;
    verifyTypeMDNode(&GO, Type);
    auto &Info = TypeIdInfo[Type->getOperand(1)];
    Info.UniqueId = ++CurUniqueId;
    Info.RefGlobals.push_back(GTM);
  auto AddTypeIdUse = [&](Metadata *TypeId) -> TypeIdUserInfo & {
    auto Ins = TypeIdUsers.insert({TypeId, {}});
      GlobalClassesTy::iterator GCI = GlobalClasses.insert(TypeId);
      GlobalClassesTy::member_iterator CurSet = GlobalClasses.findLeader(GCI);

      for (GlobalTypeMember *GTM : TypeIdInfo[TypeId].RefGlobals)
        CurSet = GlobalClasses.unionSets(
            CurSet, GlobalClasses.findLeader(GlobalClasses.insert(GTM)));

    return Ins.first->second;
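// GlobalClasses is an EquivalenceClasses<> (union-find) over type ids,
// GlobalTypeMembers and ICallBranchFunnels. Whenever a type id is first used,
// the lambda above unions it with every global carrying that type, so type
// ids that share a member end up in one disjoint set and are lowered together
// against a single combined layout.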
  for (const Use &U : TypeTestFunc->uses()) {
    auto CI = cast<CallInst>(U.getUser());

    for (const Use &CIU : CI->uses()) {
      if (isa<AssumeInst>(CIU.getUser()))
      OnlyAssumeUses = false;

    auto TypeIdMDVal = dyn_cast<MetadataAsValue>(CI->getArgOperand(1));
    auto TypeId = TypeIdMDVal->getMetadata();
    AddTypeIdUse(TypeId).CallSites.push_back(CI);

  if (ICallBranchFunnelFunc) {
    for (const Use &U : ICallBranchFunnelFunc->uses()) {
        report_fatal_error(
            "llvm.icall.branch.funnel not supported on this target");

      auto CI = cast<CallInst>(U.getUser());

      std::vector<GlobalTypeMember *> Targets;
      GlobalClassesTy::member_iterator CurSet;
      for (unsigned I = 1; I != CI->arg_size(); I += 2) {
            "Expected branch funnel operand to be global value");

        GlobalTypeMember *GTM = GlobalTypeMembers[Base];
        Targets.push_back(GTM);
        GlobalClassesTy::member_iterator NewSet =
            GlobalClasses.findLeader(GlobalClasses.insert(GTM));
        CurSet = GlobalClasses.unionSets(CurSet, NewSet);

      GlobalClasses.unionSets(
          CurSet, GlobalClasses.findLeader(
                      GlobalClasses.insert(ICallBranchFunnel::create(
                          Alloc, CI, Targets, ++CurUniqueId))));

  if (ExportSummary) {
    for (auto &P : TypeIdInfo) {
      if (auto *TypeId = dyn_cast<MDString>(P.first))

    for (auto &P : *ExportSummary) {
      for (auto &S : P.second.SummaryList) {
        if (!ExportSummary->isGlobalValueLive(S.get()))
        if (auto *FS = dyn_cast<FunctionSummary>(S->getBaseObject()))
            AddTypeIdUse(MD).IsExported = true;

  if (GlobalClasses.empty())

  std::vector<std::pair<GlobalClassesTy::iterator, unsigned>> Sets;
  for (GlobalClassesTy::iterator I = GlobalClasses.begin(),
                                 E = GlobalClasses.end();
    ++NumTypeIdDisjointSets;

    unsigned MaxUniqueId = 0;
    for (GlobalClassesTy::member_iterator MI = GlobalClasses.member_begin(I);
         MI != GlobalClasses.member_end(); ++MI) {
      if (auto *MD = dyn_cast_if_present<Metadata *>(*MI))
        MaxUniqueId = std::max(MaxUniqueId, TypeIdInfo[MD].UniqueId);
      else if (auto *BF = dyn_cast_if_present<ICallBranchFunnel *>(*MI))
        MaxUniqueId = std::max(MaxUniqueId, BF->UniqueId);
    Sets.emplace_back(I, MaxUniqueId);

  for (const auto &S : Sets) {
    std::vector<Metadata *> TypeIds;
    std::vector<GlobalTypeMember *> Globals;
    std::vector<ICallBranchFunnel *> ICallBranchFunnels;
    for (GlobalClassesTy::member_iterator MI =
             GlobalClasses.member_begin(S.first);
         MI != GlobalClasses.member_end(); ++MI) {
      if (isa<Metadata *>(*MI))
        TypeIds.push_back(cast<Metadata *>(*MI));
      else if (isa<GlobalTypeMember *>(*MI))
        Globals.push_back(cast<GlobalTypeMember *>(*MI));
      else
        ICallBranchFunnels.push_back(cast<ICallBranchFunnel *>(*MI));

      return TypeIdInfo[M1].UniqueId < TypeIdInfo[M2].UniqueId;

      [&](ICallBranchFunnel *F1, ICallBranchFunnel *F2) {
        return F1->UniqueId < F2->UniqueId;

    buildBitSetsFromDisjointSet(TypeIds, Globals, ICallBranchFunnels);

  allocateByteArrays();
  if (ExportSummary) {
    if (NamedMDNode *AliasesMD = M.getNamedMetadata("aliases")) {
      for (auto *AliasMD : AliasesMD->operands()) {
        assert(AliasMD->getNumOperands() >= 4);
            cast<MDString>(AliasMD->getOperand(0))->getString();
        StringRef Aliasee = cast<MDString>(AliasMD->getOperand(1))->getString();

        if (!ExportedFunctions.count(Aliasee) ||
            !M.getNamedAlias(Aliasee))

            cast<ConstantAsMetadata>(AliasMD->getOperand(2))
                ->getUniqueInteger()
            static_cast<bool>(cast<ConstantAsMetadata>(AliasMD->getOperand(3))
                                  ->getUniqueInteger()

        if (auto *F = M.getFunction(AliasName)) {
          F->replaceAllUsesWith(Alias);
          F->eraseFromParent();

  if (ExportSummary) {
    if (NamedMDNode *SymversMD = M.getNamedMetadata("symvers")) {
      for (auto *Symver : SymversMD->operands()) {
        assert(Symver->getNumOperands() >= 2);
            cast<MDString>(Symver->getOperand(0))->getString();
        StringRef Alias = cast<MDString>(Symver->getOperand(1))->getString();

        if (!ExportedFunctions.count(SymbolName))

        M.appendModuleInlineAsm(
            (llvm::Twine(".symver ") + SymbolName + ", " + Alias).str());

    Changed = LowerTypeTestsModule::runForTesting(M, AM);
    LowerTypeTestsModule(M, AM, ExportSummary, ImportSummary, DropTypeTests)