#include <system_error>

using namespace lowertypetests;

#define DEBUG_TYPE "lowertypetests"

STATISTIC(ByteArraySizeBits, "Byte array size in bits");
STATISTIC(ByteArraySizeBytes, "Byte array size in bytes");
STATISTIC(NumByteArraysCreated, "Number of byte arrays created");
STATISTIC(NumTypeTestCallsLowered, "Number of type test calls lowered");
STATISTIC(NumTypeIdDisjointSets, "Number of disjoint sets of type identifiers");
99 "lowertypetests-avoid-reuse",
100 cl::desc(
"Try to avoid reuse of byte array addresses using aliases"),
104 "lowertypetests-summary-action",
105 cl::desc(
"What to do with the summary when running this pass"),
107 clEnumValN(PassSummaryAction::Import,
"import",
108 "Import typeid resolutions from summary and globals"),
109 clEnumValN(PassSummaryAction::Export,
"export",
110 "Export typeid resolutions to summary and globals")),
114 "lowertypetests-read-summary",
115 cl::desc(
"Read summary from given YAML file before running pass"),
119 "lowertypetests-write-summary",
120 cl::desc(
"Write summary to given YAML file after running pass"),
125 cl::desc(
"Simply drop type test assume sequences"),
  return Bits.count(BitOffset);
  std::vector<uint64_t> &Fragment = Fragments.back();
  for (auto ObjIndex : F) {
    if (OldFragmentIndex == 0) {
      Fragment.push_back(ObjIndex);
    std::vector<uint64_t> &OldFragment = Fragments[OldFragmentIndex];
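// GlobalLayoutBuilder keeps the members of each bit set contiguous in the
// combined layout: FragmentMap records which fragment an object index
// currently lives in, and addFragment() splices any previously seen fragment
// into the new one. For example (illustrative indices only): after
// addFragment({0, 1}) and addFragment({1, 2}), indices 0, 1 and 2 end up in
// one fragment, so a type identifier covering {1, 2} still maps to a
// contiguous range of the final layout.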
void ByteArrayBuilder::allocate(const std::set<uint64_t> &Bits,
                                uint64_t BitSize, uint64_t &AllocByteOffset,
                                uint8_t &AllocMask) {
  unsigned ReqSize = AllocByteOffset + BitSize;
  if (Bytes.size() < ReqSize)
    Bytes.resize(ReqSize);
  AllocMask = 1 << Bit;
    Bytes[AllocByteOffset + B] |= AllocMask;
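// ByteArrayBuilder packs up to eight bit sets into each byte of the byte
// array: each set is assigned one bit position (the least-allocated one so
// far), and AllocMask is the single-bit mask for that position. A later
// "byte & mask != 0" test therefore checks membership in exactly one set even
// though the underlying bytes are shared between sets.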
static bool isJumpTableCanonical(Function *F) {
  if (F->isDeclarationForLinker())
    return false;
  auto *CI = mdconst::extract_or_null<ConstantInt>(
      F->getParent()->getModuleFlag("CFI Canonical Jump Tables"));
  if (!CI || !CI->isZero())
    return true;
  return F->hasFnAttribute("cfi-canonical-jump-table");
}
struct ByteArrayInfo {
  std::set<uint64_t> Bits;
  uint64_t BitSize;
  GlobalVariable *ByteArray;
  GlobalVariable *MaskGlobal;
  uint8_t *MaskPtr = nullptr;
};
class GlobalTypeMember final : TrailingObjects<GlobalTypeMember, MDNode *> {
  friend TrailingObjects;

  GlobalObject *GO;
  size_t NTypes;

  // For functions: true if the jump table is canonical.
  bool IsJumpTableCanonical;

  // For functions: true if this function is either defined or used in a
  // thinlto module and its jumptable entry needs to be exported to thinlto
  // backends.
  bool IsExported;

  size_t numTrailingObjects(OverloadToken<MDNode *>) const { return NTypes; }

public:
  static GlobalTypeMember *create(BumpPtrAllocator &Alloc, GlobalObject *GO,
                                  bool IsJumpTableCanonical, bool IsExported,
                                  ArrayRef<MDNode *> Types) {
    auto *GTM = static_cast<GlobalTypeMember *>(Alloc.Allocate(
        totalSizeToAlloc<MDNode *>(Types.size()), alignof(GlobalTypeMember)));
    GTM->GO = GO;
    GTM->NTypes = Types.size();
    GTM->IsJumpTableCanonical = IsJumpTableCanonical;
    GTM->IsExported = IsExported;
    std::uninitialized_copy(Types.begin(), Types.end(),
                            GTM->getTrailingObjects<MDNode *>());
    return GTM;
  }

  GlobalObject *getGlobal() const { return GO; }
  bool isJumpTableCanonical() const { return IsJumpTableCanonical; }
  bool isExported() const { return IsExported; }
  ArrayRef<MDNode *> types() const {
    return ArrayRef(getTrailingObjects<MDNode *>(), NTypes);
  }
};
struct ICallBranchFunnel final
    : TrailingObjects<ICallBranchFunnel, GlobalTypeMember *> {
  auto *Call = static_cast<ICallBranchFunnel *>(
      Alloc.Allocate(totalSizeToAlloc<GlobalTypeMember *>(Targets.size()),
                     alignof(ICallBranchFunnel)));
  Call->UniqueId = UniqueId;
  std::uninitialized_copy(Targets.begin(), Targets.end(),
                          Call->getTrailingObjects<GlobalTypeMember *>());
  return ArrayRef(getTrailingObjects<GlobalTypeMember *>(), NTargets);
struct ScopedSaveAliaseesAndUsed {
  Module &M;
  SmallVector<GlobalValue *, 4> Used, CompilerUsed;
  std::vector<std::pair<GlobalAlias *, Function *>> FunctionAliases;
  std::vector<std::pair<GlobalIFunc *, Function *>> ResolverIFuncs;

  ScopedSaveAliaseesAndUsed(Module &M) : M(M) {
    if (GlobalVariable *GV = collectUsedGlobalVariables(M, Used, false))
      GV->eraseFromParent();
    if (GlobalVariable *GV = collectUsedGlobalVariables(M, CompilerUsed, true))
      GV->eraseFromParent();

    for (auto &GA : M.aliases()) {
      if (auto *F = dyn_cast<Function>(GA.getAliasee()->stripPointerCasts()))
        FunctionAliases.push_back({&GA, F});
    }

    for (auto &GI : M.ifuncs())
      if (auto *F = dyn_cast<Function>(GI.getResolver()->stripPointerCasts()))
        ResolverIFuncs.push_back({&GI, F});
  }

  ~ScopedSaveAliaseesAndUsed() {
    appendToUsed(M, Used);
    appendToCompilerUsed(M, CompilerUsed);

    for (auto P : FunctionAliases)
      P.first->setAliasee(P.second);

    for (auto P : ResolverIFuncs) {
      P.first->setResolver(P.second);
    }
  }
};
class LowerTypeTestsModule {
  bool CanUseArmJumpTable = false, CanUseThumbBWJumpTable = false;

  IntegerType *IntPtrTy = M.getDataLayout().getIntPtrType(M.getContext(), 0);

  struct TypeIdUserInfo {
    std::vector<CallInst *> CallSites;
    bool IsExported = false;
  };

  struct TypeIdLowering {

  std::vector<ByteArrayInfo> ByteArrayInfos;

  Function *WeakInitializerFn = nullptr;

  bool shouldExportConstantsAsAbsoluteSymbols();
  uint8_t *exportTypeId(StringRef TypeId, const TypeIdLowering &TIL);
  TypeIdLowering importTypeId(StringRef TypeId);
  void importFunction(Function *F, bool isJumpTableCanonical,
                      std::vector<GlobalAlias *> &AliasesToErase);

  ByteArrayInfo *createByteArray(BitSetInfo &BSI);
  void allocateByteArrays();
  void lowerTypeTestCalls(
      ArrayRef<Metadata *> TypeIds, Constant *CombinedGlobalAddr,
      const DenseMap<GlobalTypeMember *, uint64_t> &GlobalLayout);
  Value *lowerTypeTestCall(Metadata *TypeId, CallInst *CI,
                           const TypeIdLowering &TIL);
  unsigned getJumpTableEntrySize();
  Type *getJumpTableEntryType();
  void replaceWeakDeclarationWithJumpTablePtr(Function *F, Constant *JT,
                                              bool IsJumpTableCanonical);
  void findGlobalVariableUsersOf(Constant *C,
                                 SmallSetVector<GlobalVariable *, 8> &Out);
  void replaceCfiUses(Function *Old, Value *New, bool IsJumpTableCanonical);
  void replaceDirectCalls(Value *Old, Value *New);
  for (const auto &GlobalAndOffset : GlobalLayout) {
    for (MDNode *Type : GlobalAndOffset.first->types()) {
      if (Type->getOperand(1) != TypeId)
        continue;
      uint64_t Offset =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
              ->getZExtValue();
static Value *createMaskedBitTest(IRBuilder<> &B, Value *Bits,
                                  Value *BitOffset) {
  auto BitsType = cast<IntegerType>(Bits->getType());
  unsigned BitWidth = BitsType->getBitWidth();

  BitOffset = B.CreateZExtOrTrunc(BitOffset, BitsType);
  Value *BitIndex =
      B.CreateAnd(BitOffset, ConstantInt::get(BitsType, BitWidth - 1));
  Value *BitMask = B.CreateShl(ConstantInt::get(BitsType, 1), BitIndex);
  Value *MaskedBits = B.CreateAnd(Bits, BitMask);
  return B.CreateICmpNE(MaskedBits, ConstantInt::get(BitsType, 0));
}
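// createMaskedBitTest builds a shifted single-bit mask and ANDs it with the
// inline bit vector, i.e. it tests whether bit (BitOffset mod sizeof(Bits)*8)
// is set in Bits, which is how "Inline" and "AllOnes" resolutions avoid a
// load from a byte array.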
ByteArrayInfo *LowerTypeTestsModule::createByteArray(BitSetInfo &BSI) {
  ByteArrayInfos.emplace_back();
  ByteArrayInfo *BAI = &ByteArrayInfos.back();

  BAI->Bits = BSI.Bits;
  BAI->ByteArray = ByteArrayGlobal;
  BAI->MaskGlobal = MaskGlobal;
void LowerTypeTestsModule::allocateByteArrays() {
  llvm::stable_sort(ByteArrayInfos,
                    [](const ByteArrayInfo &BAI1, const ByteArrayInfo &BAI2) {
                      return BAI1.BitSize > BAI2.BitSize;
                    });

  std::vector<uint64_t> ByteArrayOffsets(ByteArrayInfos.size());

  for (unsigned I = 0; I != ByteArrayInfos.size(); ++I) {
    ByteArrayInfo *BAI = &ByteArrayInfos[I];

    uint8_t Mask;
    BAB.allocate(BAI->Bits, BAI->BitSize, ByteArrayOffsets[I], Mask);

    BAI->MaskGlobal->replaceAllUsesWith(
        ConstantExpr::getIntToPtr(ConstantInt::get(Int8Ty, Mask), Int8PtrTy));
    BAI->MaskGlobal->eraseFromParent();
    if (BAI->MaskPtr)
      *BAI->MaskPtr = Mask;
  }

  for (unsigned I = 0; I != ByteArrayInfos.size(); ++I) {
    ByteArrayInfo *BAI = &ByteArrayInfos[I];

    Constant *GEP = ConstantExpr::getInBoundsGetElementPtr(
        ByteArrayConst->getType(), ByteArray, Idxs);
    BAI->ByteArray->replaceAllUsesWith(Alias);
    BAI->ByteArray->eraseFromParent();
  }

  ByteArraySizeBytes = BAB.Bytes.size();
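// The byte arrays for all bit sets in the module are coalesced into a single
// constant global here; each ByteArrayInfo then refers to its slice of it
// through a private alias at the recorded offset, and its mask global is
// folded away into an inline single-byte constant.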
Value *LowerTypeTestsModule::createBitSetTest(IRBuilder<> &B,
                                              const TypeIdLowering &TIL,
                                              Value *BitOffset) {
  Constant *ByteArray = TIL.TheByteArray;
  ByteArray = GlobalAlias::create(Int8Ty, 0, GlobalValue::PrivateLinkage,
                                  "bits_use", ByteArray, &M);
  Value *ByteAddr = B.CreateGEP(Int8Ty, ByteArray, BitOffset);
static bool isKnownTypeIdMember(Metadata *TypeId, const DataLayout &DL,
                                Value *V, uint64_t COffset) {
  if (auto GV = dyn_cast<GlobalObject>(V)) {
    SmallVector<MDNode *, 2> Types;
    GV->getMetadata(LLVMContext::MD_type, Types);
    for (MDNode *Type : Types) {
      if (Type->getOperand(1) != TypeId)
        continue;
      uint64_t Offset =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
              ->getZExtValue();
      if (COffset == Offset)
        return true;
    }
    return false;
  }

  if (auto GEP = dyn_cast<GEPOperator>(V)) {
    APInt APOffset(DL.getIndexSizeInBits(0), 0);
    bool Result = GEP->accumulateConstantOffset(DL, APOffset);
    if (!Result)
      return false;
    COffset += APOffset.getZExtValue();
    return isKnownTypeIdMember(TypeId, DL, GEP->getPointerOperand(), COffset);
  }

  if (auto Op = dyn_cast<Operator>(V)) {
    if (Op->getOpcode() == Instruction::BitCast)
      return isKnownTypeIdMember(TypeId, DL, Op->getOperand(0), COffset);
    if (Op->getOpcode() == Instruction::Select)
      return isKnownTypeIdMember(TypeId, DL, Op->getOperand(1), COffset) &&
             isKnownTypeIdMember(TypeId, DL, Op->getOperand(2), COffset);
  }
Value *LowerTypeTestsModule::lowerTypeTestCall(Metadata *TypeId, CallInst *CI,
                                               const TypeIdLowering &TIL) {
  Value *PtrAsInt = B.CreatePtrToInt(Ptr, IntPtrTy);
  if (TIL.TheKind == TypeTestResolution::Single)
    return B.CreateICmpEQ(PtrAsInt, OffsetedGlobalAsInt);

  Value *PtrOffset = B.CreateSub(PtrAsInt, OffsetedGlobalAsInt);

  // Check that the offset is both in range and suitably aligned with a single
  // comparison: rotating right by log2(alignment) moves the low bits that must
  // be zero into the high bits, and also yields the bit offset to load from
  // the bitset.
  Value *OffsetSHR =
      B.CreateLShr(PtrOffset, B.CreateZExt(TIL.AlignLog2, IntPtrTy));
  Value *OffsetSHL = B.CreateShl(
      PtrOffset, B.CreateZExt(
                     ConstantExpr::getSub(
                         ConstantInt::get(Int8Ty, DL.getPointerSizeInBits(0)),
                         TIL.AlignLog2),
                     IntPtrTy));
  Value *BitOffset = B.CreateOr(OffsetSHR, OffsetSHL);

  Value *OffsetInRange = B.CreateICmpULE(BitOffset, TIL.SizeM1);
  if (TIL.TheKind == TypeTestResolution::AllOnes)
    return OffsetInRange;
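  // Worked example (illustrative numbers only): with AlignLog2 = 3 on a
  // 64-bit target, a pointer 24 bytes past the combined global rotates to
  // BitOffset 3, which passes the ULE check against SizeM1 and indexes the
  // bit set; a pointer 25 bytes past it rotates its low set bit into bit 61,
  // producing a huge BitOffset that fails the same check, so misalignment and
  // out-of-range offsets are both rejected by one comparison.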
  if (CI->hasOneUse())
    if (auto *Br = dyn_cast<BranchInst>(*CI->user_begin())) {
      NewBr->setMetadata(LLVMContext::MD_prof,
                         Br->getMetadata(LLVMContext::MD_prof));
      // Update phis in Else resulting from CI being removed.
      for (auto &Phi : Else->phis())
        Phi.addIncoming(Phi.getIncomingValueForBlock(Then), InitialBB);
      return createBitSetTest(ThenB, TIL, BitOffset);
    }

  Value *Bit = createBitSetTest(ThenB, TIL, BitOffset);
  B.SetInsertPoint(CI);
  P->addIncoming(Bit, ThenB.GetInsertBlock());
void LowerTypeTestsModule::buildBitSetsFromGlobalVariables(
    ArrayRef<Metadata *> TypeIds, ArrayRef<GlobalTypeMember *> Globals) {
  std::vector<Constant *> GlobalInits;
  for (GlobalTypeMember *G : Globals) {
    auto *GV = cast<GlobalVariable>(G->getGlobal());
    Align Alignment =
        DL.getValueOrABITypeAlignment(GV->getAlign(), GV->getValueType());
    MaxAlign = std::max(MaxAlign, Alignment);
    GlobalLayout[G] = GVOffset;
    GlobalInits.push_back(
        ConstantAggregateZero::get(ArrayType::get(Int8Ty, Padding)));
    GlobalInits.push_back(GV->getInitializer());
    uint64_t InitSize = DL.getTypeAllocSize(GV->getValueType());
    CurOffset = GVOffset + InitSize;
    // Cap the amount of padding we are willing to insert after each global.
    if (DesiredPadding > 32)
      DesiredPadding = alignTo(InitSize, 32) - InitSize;
  }

  auto *CombinedGlobal =
      new GlobalVariable(M, NewInit->getType(), /*isConstant=*/true,
                         GlobalValue::PrivateLinkage, NewInit);
  CombinedGlobal->setAlignment(MaxAlign);

  lowerTypeTestCalls(TypeIds, CombinedGlobal, GlobalLayout);

  for (unsigned I = 0; I != Globals.size(); ++I) {
    // Multiply by 2 to account for padding elements.
    Constant *CombinedGlobalElemPtr = ConstantExpr::getGetElementPtr(
        NewInit->getType(), CombinedGlobal, CombinedGlobalIdxs);
    GlobalAlias *GAlias =
        GlobalAlias::create(NewTy->getElementType(I * 2), 0, GV->getLinkage(),
                            "", CombinedGlobalElemPtr, &M);
bool LowerTypeTestsModule::shouldExportConstantsAsAbsoluteSymbols() {
uint8_t *LowerTypeTestsModule::exportTypeId(StringRef TypeId,
                                            const TypeIdLowering &TIL) {
  auto ExportGlobal = [&](StringRef Name, Constant *C) {
    GlobalAlias *GA =
        GlobalAlias::create(Int8Ty, 0, GlobalValue::ExternalLinkage,
                            "__typeid_" + TypeId + "_" + Name, C, &M);
    GA->setVisibility(GlobalValue::HiddenVisibility);
  };

  auto ExportConstant = [&](StringRef Name, uint64_t &Storage, Constant *C) {
    if (shouldExportConstantsAsAbsoluteSymbols())
      ExportGlobal(Name, ConstantExpr::getIntToPtr(C, Int8PtrTy));
    else
      Storage = cast<ConstantInt>(C)->getZExtValue();
  };

  ExportGlobal("global_addr", TIL.OffsetedGlobal);

  ExportConstant("align", TTRes.AlignLog2, TIL.AlignLog2);
  ExportConstant("size_m1", TTRes.SizeM1, TIL.SizeM1);

  uint64_t BitSize = cast<ConstantInt>(TIL.SizeM1)->getZExtValue() + 1;

  ExportGlobal("byte_array", TIL.TheByteArray);
  if (shouldExportConstantsAsAbsoluteSymbols())
    ExportGlobal("bit_mask", TIL.BitMask);

  ExportConstant("inline_bits", TTRes.InlineBits, TIL.InlineBits);
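// exportTypeId publishes the resolution for TypeId as specially named symbols
// (__typeid_<TypeId>_global_addr, _align, _size_m1, _byte_array, _bit_mask,
// _inline_bits) plus a TypeIdSummary entry, so that ThinLTO backends can
// reconstruct the same lowering via importTypeId() below without seeing the
// combined globals themselves.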
LowerTypeTestsModule::TypeIdLowering
LowerTypeTestsModule::importTypeId(StringRef TypeId) {
  Constant *C = M.getOrInsertGlobal(("__typeid_" + TypeId + "_" + Name).str(),
  if (auto *GV = dyn_cast<GlobalVariable>(C))

  if (!shouldExportConstantsAsAbsoluteSymbols()) {
    if (!isa<IntegerType>(Ty))

  auto *GV = cast<GlobalVariable>(C->stripPointerCasts());
  if (isa<IntegerType>(Ty))
  if (GV->getMetadata(LLVMContext::MD_absolute_symbol))

    SetAbsRange(~0ull, ~0ull);
    SetAbsRange(0, 1ull << AbsWidth);

  TIL.OffsetedGlobal = ImportGlobal("global_addr");
  TIL.AlignLog2 = ImportConstant("align", TTRes.AlignLog2, 8, Int8Ty);
  TIL.TheByteArray = ImportGlobal("byte_array");
  TIL.BitMask = ImportConstant("bit_mask", TTRes.BitMask, 8, Int8PtrTy);
  TIL.InlineBits = ImportConstant(
void LowerTypeTestsModule::importTypeTest(CallInst *CI) {
  auto TypeIdMDVal = dyn_cast<MetadataAsValue>(CI->getArgOperand(1));
  auto TypeIdStr = dyn_cast<MDString>(TypeIdMDVal->getMetadata());
  TypeIdLowering TIL = importTypeId(TypeIdStr->getString());
  Value *Lowered = lowerTypeTestCall(TypeIdStr, CI, TIL);
void LowerTypeTestsModule::importFunction(
    Function *F, bool isJumpTableCanonical,
    std::vector<GlobalAlias *> &AliasesToErase) {
  assert(F->getType()->getAddressSpace() == 0);

  std::string Name = std::string(F->getName());

  if (F->isDSOLocal()) {
    Function *RealF = Function::Create(F->getFunctionType(),
                                       GlobalValue::ExternalLinkage,
                                       F->getAddressSpace(),
                                       Name + ".cfi", &M);
    replaceDirectCalls(F, RealF);
  }

  Function *FDecl;
  if (!isJumpTableCanonical) {
    FDecl = Function::Create(F->getFunctionType(), GlobalValue::ExternalLinkage,
                             F->getAddressSpace(), Name + ".cfi_jt", &M);
  } else {
    F->setName(Name + ".cfi");
    FDecl = Function::Create(F->getFunctionType(), GlobalValue::ExternalLinkage,
                             F->getAddressSpace(), Name, &M);

    // Delete aliases pointing to this function; they will be re-created in
    // the merged output.
    for (auto &U : F->uses()) {
      if (auto *A = dyn_cast<GlobalAlias>(U.getUser())) {
        Function *AliasDecl = Function::Create(
            F->getFunctionType(), GlobalValue::ExternalLinkage,
            F->getAddressSpace(), "", &M);
        A->replaceAllUsesWith(AliasDecl);
        AliasesToErase.push_back(A);
      }
    }
  }

  if (F->hasExternalWeakLinkage())

  F->setVisibility(Visibility);
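// In the ThinLTO import phase, a CFI-canonical function F is renamed to
// F.cfi and a declaration named F is created that the full-LTO link resolves
// to F's jump table entry; non-canonical references instead go through an
// F.cfi_jt declaration. Address-taken references are thereby redirected to
// the jump table while direct calls keep targeting the real body.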
void LowerTypeTestsModule::lowerTypeTestCalls(
    ArrayRef<Metadata *> TypeIds, Constant *CombinedGlobalAddr,
    const DenseMap<GlobalTypeMember *, uint64_t> &GlobalLayout) {
  for (Metadata *TypeId : TypeIds) {
    // Build the bitset.
    BitSetInfo BSI = buildBitSet(TypeId, GlobalLayout);
    LLVM_DEBUG({
      if (auto MDS = dyn_cast<MDString>(TypeId))
        dbgs() << MDS->getString() << ": ";
      else
        dbgs() << "<unnamed>: ";
      BSI.print(dbgs());
    });

    ByteArrayInfo *BAI = nullptr;
    TypeIdLowering TIL;

    } else if (BSI.BitSize <= 64) {
      // If the bit set is sufficiently small, we can avoid a load by bit
      // testing a constant.
      uint64_t InlineBits = 0;
      for (auto Bit : BSI.Bits)
        InlineBits |= uint64_t(1) << Bit;
      if (InlineBits == 0)
        TIL.TheKind = TypeTestResolution::Unsat;
    } else {
      ++NumByteArraysCreated;
      BAI = createByteArray(BSI);
      TIL.TheByteArray = BAI->ByteArray;
      TIL.BitMask = BAI->MaskGlobal;
    }

    TypeIdUserInfo &TIUI = TypeIdUsers[TypeId];

    if (TIUI.IsExported) {
      uint8_t *MaskPtr = exportTypeId(cast<MDString>(TypeId)->getString(), TIL);
      if (BAI)
        BAI->MaskPtr = MaskPtr;
    }

    // Lower each call to llvm.type.test for this type identifier.
    for (CallInst *CI : TIUI.CallSites) {
      ++NumTypeTestCallsLowered;
      Value *Lowered = lowerTypeTestCall(TypeId, CI, TIL);
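// Each type identifier is lowered to one TypeTestResolution kind depending on
// the shape of its bit set: Single (exactly one member), AllOnes (contiguous
// range, range check alone suffices), Inline (at most 64 bits, tested against
// an immediate), ByteArray (a load from the shared byte array), or Unsat (no
// members, so the test folds to false).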
  if (Type->getNumOperands() != 2)

  if (isa<GlobalVariable>(GO) && GO->hasSection())
    report_fatal_error(
        "A member of a type identifier may not have an explicit section");

  auto OffsetConstMD = dyn_cast<ConstantAsMetadata>(Type->getOperand(0));
  auto OffsetInt = dyn_cast<ConstantInt>(OffsetConstMD->getValue());
unsigned LowerTypeTestsModule::getJumpTableEntrySize() {
  switch (JumpTableArch) {
  case Triple::x86:
  case Triple::x86_64:
    if (const auto *MD = mdconst::extract_or_null<ConstantInt>(
            M.getModuleFlag("cf-protection-branch")))
      if (MD->getZExtValue())
        return kX86IBTJumpTableEntrySize;
    return kX86JumpTableEntrySize;
  case Triple::thumb:
    if (CanUseThumbBWJumpTable)
      return kARMJumpTableEntrySize;
    return kARMv6MJumpTableEntrySize;
  case Triple::aarch64:
    if (const auto *BTE = mdconst::extract_or_null<ConstantInt>(
            M.getModuleFlag("branch-target-enforcement")))
      if (BTE->getZExtValue())
        return kARMBTIJumpTableEntrySize;
    return kARMJumpTableEntrySize;
  default:
    report_fatal_error("Unsupported architecture for jump tables");
  }
}
void LowerTypeTestsModule::createJumpTableEntry(
    raw_ostream &AsmOS, raw_ostream &ConstraintOS,
    Triple::ArchType JumpTableArch, SmallVectorImpl<Value *> &AsmArgs,
    Function *Dest) {
  unsigned ArgIndex = AsmArgs.size();

  if (JumpTableArch == Triple::x86 || JumpTableArch == Triple::x86_64) {
    bool Endbr = false;
    if (const auto *MD = mdconst::extract_or_null<ConstantInt>(
            M.getModuleFlag("cf-protection-branch")))
      Endbr = !MD->isZero();
    if (Endbr)
      AsmOS << (JumpTableArch == Triple::x86 ? "endbr32\n" : "endbr64\n");
    AsmOS << "jmp ${" << ArgIndex << ":c}@plt\n";
    if (Endbr)
      AsmOS << ".balign 16, 0xcc\n";
    else
      AsmOS << "int3\nint3\nint3\n";
  } else if (JumpTableArch == Triple::arm) {
    AsmOS << "b $" << ArgIndex << "\n";
  } else if (JumpTableArch == Triple::aarch64) {
    if (const auto *BTE = mdconst::extract_or_null<ConstantInt>(
            M.getModuleFlag("branch-target-enforcement")))
      if (BTE->getZExtValue())
        AsmOS << "bti c\n";
    AsmOS << "b $" << ArgIndex << "\n";
  } else if (JumpTableArch == Triple::thumb) {
    if (!CanUseThumbBWJumpTable) {
      // In Armv6-M, the b.w encoding is unavailable; emit a longer sequence
      // that loads the destination address from an inline constant pool.
      AsmOS << "push {r0,r1}\n"
            << "ldr r0, 1f\n"
            << "0: add r0, r0, pc\n"
            << "str r0, [sp, #4]\n"
            << "pop {r0,pc}\n"
            << ".balign 4\n"
            << "1: .word $" << ArgIndex << " - (0b + 4)\n";
    } else {
      AsmOS << "b.w $" << ArgIndex << "\n";
    }
  } else if (JumpTableArch == Triple::riscv32 ||
             JumpTableArch == Triple::riscv64) {
    AsmOS << "tail $" << ArgIndex << "@plt\n";
  }

  ConstraintOS << (ArgIndex > 0 ? ",s" : "s");
Type *LowerTypeTestsModule::getJumpTableEntryType() {
void LowerTypeTestsModule::buildBitSetsFromFunctions(
    ArrayRef<Metadata *> TypeIds, ArrayRef<GlobalTypeMember *> Functions) {
    buildBitSetsFromFunctionsNative(TypeIds, Functions);
    buildBitSetsFromFunctionsWASM(TypeIds, Functions);
void LowerTypeTestsModule::moveInitializerToModuleConstructor(
    GlobalVariable *GV) {
  if (WeakInitializerFn == nullptr) {
    WeakInitializerFn = Function::Create(
        FunctionType::get(Type::getVoidTy(M.getContext()),
                          /* IsVarArg */ false),
        GlobalValue::InternalLinkage,
        M.getDataLayout().getProgramAddressSpace(),
        "__cfi_global_var_init", &M);
    WeakInitializerFn->setSection(
        ObjectFormat == Triple::MachO
            ? "__TEXT,__StaticInit,regular,pure_instructions"
            : ".text.startup");
  }
  IRBuilder<> IRB(WeakInitializerFn->getEntryBlock().getTerminator());
void LowerTypeTestsModule::findGlobalVariableUsersOf(
    Constant *C, SmallSetVector<GlobalVariable *, 8> &Out) {
  for (auto *U : C->users()){
    if (auto *GV = dyn_cast<GlobalVariable>(U))
      Out.insert(GV);
    else if (auto *C2 = dyn_cast<Constant>(U))
      findGlobalVariableUsersOf(C2, Out);
  }
}
void LowerTypeTestsModule::replaceWeakDeclarationWithJumpTablePtr(
    Function *F, Constant *JT, bool IsJumpTableCanonical) {
  SmallSetVector<GlobalVariable *, 8> GlobalVarUsers;
  findGlobalVariableUsersOf(F, GlobalVarUsers);
  for (auto *GV : GlobalVarUsers)
    moveInitializerToModuleConstructor(GV);

  // Cannot RAUW F with an expression that uses F; replace with a temporary
  // placeholder first.
  Function *PlaceholderFn =
      Function::Create(cast<FunctionType>(F->getValueType()),
                       GlobalValue::ExternalWeakLinkage,
                       F->getAddressSpace(), "", &M);
  replaceCfiUses(F, PlaceholderFn, IsJumpTableCanonical);

  auto *InsertPt = dyn_cast<Instruction>(U.getUser());
  assert(InsertPt && "Non-instruction users should have been eliminated");
  auto *PN = dyn_cast<PHINode>(InsertPt);
  if (PN)
    InsertPt = PN->getIncomingBlock(U)->getTerminator();
  PN->setIncomingValueForBlock(InsertPt->getParent(), Select);
static bool isThumbFunction(Function *F, Triple::ArchType ModuleArch) {
  Attribute TFAttr = F->getFnAttribute("target-features");
      if (Feature == "-thumb-mode")
        return false;
      else if (Feature == "+thumb-mode")
        return true;

  if (!CanUseThumbBWJumpTable && CanUseArmJumpTable) {
    unsigned ArmCount = 0, ThumbCount = 0;
    for (const auto GTM : Functions) {
      if (!GTM->isJumpTableCanonical()) {
        // PLT stubs are always ARM.
        ++ArmCount;
        continue;
      }

      Function *F = cast<Function>(GTM->getGlobal());
      ++(isThumbFunction(F, Arch) ? ThumbCount : ArmCount);
void LowerTypeTestsModule::createJumpTable(
    Function *F, ArrayRef<GlobalTypeMember *> Functions) {
  std::string AsmStr, ConstraintStr;
  raw_string_ostream AsmOS(AsmStr), ConstraintOS(ConstraintStr);
  SmallVector<Value *, 16> AsmArgs;

  for (GlobalTypeMember *GTM : Functions)
    createJumpTableEntry(AsmOS, ConstraintOS, JumpTableArch, AsmArgs,
                         cast<Function>(GTM->getGlobal()));

  // Align the whole table by entry size.
  F->setAlignment(Align(getJumpTableEntrySize()));
  F->addFnAttr(Attribute::Naked);
  if (JumpTableArch == Triple::arm)
    F->addFnAttr("target-features", "-thumb-mode");
  if (JumpTableArch == Triple::thumb) {
    F->addFnAttr("target-features", "+thumb-mode");
    if (CanUseThumbBWJumpTable) {
      // Thumb jump table assembly needs Thumb2; the following attribute is
      // added by Clang for -march=armv7.
      F->addFnAttr("target-cpu", "cortex-a8");
    }
  }
  if (JumpTableArch == Triple::aarch64) {
    F->addFnAttr("branch-target-enforcement", "false");
    F->addFnAttr("sign-return-address", "none");
  }
  if (JumpTableArch == Triple::riscv32 || JumpTableArch == Triple::riscv64) {
    // Make sure the jump table assembly is not modified by the assembler or
    // the linker.
    F->addFnAttr("target-features", "-c,-relax");
  }
  F->addFnAttr(Attribute::NoCfCheck);
  F->addFnAttr(Attribute::NoUnwind);

  for (const auto &Arg : AsmArgs)
    ArgTypes.push_back(Arg->getType());

  InlineAsm *JumpTableAsm = InlineAsm::get(
      FunctionType::get(Type::getVoidTy(M.getContext()), ArgTypes, false),
      AsmOS.str(), ConstraintOS.str(),
      /*hasSideEffects=*/true);

  IRB.CreateCall(JumpTableAsm, AsmArgs);
  IRB.CreateUnreachable();
void LowerTypeTestsModule::buildBitSetsFromFunctionsNative(
    ArrayRef<Metadata *> TypeIds, ArrayRef<GlobalTypeMember *> Functions) {
  // Decide on the jump table encoding, so that we know how big the entries
  // will be.
  JumpTableArch = selectJumpTableArmEncoding(Functions);

  // Build a simple layout based on the regular layout of jump tables.
  unsigned EntrySize = getJumpTableEntrySize();
  for (unsigned I = 0; I != Functions.size(); ++I)
    GlobalLayout[Functions[I]] = I * EntrySize;

  Function *JumpTableFn =
      Function::Create(FunctionType::get(Type::getVoidTy(M.getContext()),
                                         /* IsVarArg */ false),
                       GlobalValue::PrivateLinkage,
                       M.getDataLayout().getProgramAddressSpace(),
                       ".cfi.jumptable", &M);

  lowerTypeTestCalls(TypeIds, JumpTable, GlobalLayout);

  {
    ScopedSaveAliaseesAndUsed S(M);

    // Build aliases pointing to offsets into the jump table, and replace
    // references to the original functions with references to the aliases.
    for (unsigned I = 0; I != Functions.size(); ++I) {
      Function *F = cast<Function>(Functions[I]->getGlobal());
      bool IsJumpTableCanonical = Functions[I]->isJumpTableCanonical();

      Constant *CombinedGlobalElemPtr = ConstantExpr::getInBoundsGetElementPtr(
          JumpTableType, JumpTable,
          ArrayRef<Constant *>{ConstantInt::get(IntPtrTy, 0),
                               ConstantInt::get(IntPtrTy, I)});

      const bool IsExported = Functions[I]->isExported();
      if (!IsJumpTableCanonical) {
        GlobalValue::LinkageTypes LT = IsExported
                                           ? GlobalValue::ExternalLinkage
                                           : GlobalValue::InternalLinkage;
        GlobalAlias *JtAlias = GlobalAlias::create(
            F->getValueType(), 0, LT, F->getName() + ".cfi_jt",
            CombinedGlobalElemPtr, &M);
      }

      if (IsExported) {
        if (IsJumpTableCanonical)
          ExportSummary->cfiFunctionDefs().insert(std::string(F->getName()));
        else
          ExportSummary->cfiFunctionDecls().insert(std::string(F->getName()));
      }

      if (!IsJumpTableCanonical) {
        if (F->hasExternalWeakLinkage())
          replaceWeakDeclarationWithJumpTablePtr(F, CombinedGlobalElemPtr,
                                                 IsJumpTableCanonical);
        else
          replaceCfiUses(F, CombinedGlobalElemPtr, IsJumpTableCanonical);
      } else {
        assert(F->getType()->getAddressSpace() == 0);

        GlobalAlias *FAlias =
            GlobalAlias::create(F->getValueType(), 0, F->getLinkage(), "",
                                CombinedGlobalElemPtr, &M);
        FAlias->takeName(F);
        F->setName(FAlias->getName() + ".cfi");
        replaceCfiUses(F, FAlias, IsJumpTableCanonical);
        if (!F->hasLocalLinkage())
          F->setVisibility(GlobalVariable::HiddenVisibility);
      }
    }
  }

  createJumpTable(JumpTableFn, Functions);
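// Summary of the native lowering above: functions in this disjoint set are
// assigned consecutive slots in a single .cfi.jumptable function, type tests
// are lowered against that table's layout, address-taken uses of each
// function are rewritten to its slot (through a <name>.cfi_jt alias or by
// renaming the definition to <name>.cfi), and the table body itself is
// finally emitted as inline assembly by createJumpTable().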
void LowerTypeTestsModule::buildBitSetsFromFunctionsWASM(
    ArrayRef<Metadata *> TypeIds, ArrayRef<GlobalTypeMember *> Functions) {
  for (GlobalTypeMember *GTM : Functions) {
    Function *F = cast<Function>(GTM->getGlobal());

    // Skip functions that are not address taken, to avoid bloating the table.
    if (!F->hasAddressTaken())
      continue;

    // Store metadata with the index for each function.
    MDNode *MD = MDNode::get(F->getContext(),
                             ArrayRef<Metadata *>(ConstantAsMetadata::get(
                                 ConstantInt::get(Int64Ty, IndirectIndex))));
    F->setMetadata("wasm.index", MD);

    // Assign the counter value.
    GlobalLayout[GTM] = IndirectIndex++;
  }
void LowerTypeTestsModule::buildBitSetsFromDisjointSet(
    ArrayRef<Metadata *> TypeIds, ArrayRef<GlobalTypeMember *> Globals,
    ArrayRef<ICallBranchFunnel *> ICallBranchFunnels) {
  DenseMap<Metadata *, uint64_t> TypeIdIndices;
  for (unsigned I = 0; I != TypeIds.size(); ++I)
    TypeIdIndices[TypeIds[I]] = I;

  // For each type identifier, build a set of indices that refer to members of
  // the type identifier.
  std::vector<std::set<uint64_t>> TypeMembers(TypeIds.size());
  unsigned GlobalIndex = 0;
  DenseMap<GlobalTypeMember *, uint64_t> GlobalIndices;
  for (GlobalTypeMember *GTM : Globals) {
    for (MDNode *Type : GTM->types()) {
      // Type = { offset, type identifier }
      auto I = TypeIdIndices.find(Type->getOperand(1));
      if (I != TypeIdIndices.end())
        TypeMembers[I->second].insert(GlobalIndex);
    }
    GlobalIndices[GTM] = GlobalIndex;
    GlobalIndex++;
  }

  // Add the targets of each branch funnel as an extra "type" so that the
  // layout keeps them contiguous as well.
  for (ICallBranchFunnel *JT : ICallBranchFunnels) {
    TypeMembers.emplace_back();
    std::set<uint64_t> &TMSet = TypeMembers.back();
    for (GlobalTypeMember *T : JT->targets())
      TMSet.insert(GlobalIndices[T]);
  }

  // Order the sets of indices by size; the layout builder works best when
  // given the smaller index sets first.
  llvm::stable_sort(TypeMembers, [](const std::set<uint64_t> &O1,
                                    const std::set<uint64_t> &O2) {
    return O1.size() < O2.size();
  });

  // Build the layout.
  for (auto &&MemSet : TypeMembers)
    GLB.addFragment(MemSet);

  // Order the globals by the computed layout.
  bool IsGlobalSet =
      Globals.empty() || isa<GlobalVariable>(Globals[0]->getGlobal());
  std::vector<GlobalTypeMember *> OrderedGTMs(Globals.size());
  auto OGTMI = OrderedGTMs.begin();
  for (auto &&F : GLB.Fragments) {
    for (auto &&Offset : F) {
      if (IsGlobalSet != isa<GlobalVariable>(Globals[Offset]->getGlobal()))
        report_fatal_error("Type identifier may not contain both global "
                           "variables and functions");
      *OGTMI++ = Globals[Offset];
    }
  }

  // Build the bitsets from this disjoint set.
  if (IsGlobalSet)
    buildBitSetsFromGlobalVariables(TypeIds, OrderedGTMs);
  else
    buildBitSetsFromFunctions(TypeIds, OrderedGTMs);
LowerTypeTestsModule::LowerTypeTestsModule(
    Module &M, ModuleAnalysisManager &AM, ModuleSummaryIndex *ExportSummary,
    const ModuleSummaryIndex *ImportSummary, bool DropTypeTests)
    : M(M), ExportSummary(ExportSummary), ImportSummary(ImportSummary),
      DropTypeTests(DropTypeTests || ClDropTypeTests) {
  assert(!(ExportSummary && ImportSummary));
  Triple TargetTriple(M.getTargetTriple());
  Arch = TargetTriple.getArch();
  if (Arch == Triple::arm)
    CanUseArmJumpTable = true;
      if (TTI.hasArmWideBranch(false))
        CanUseArmJumpTable = true;
      if (TTI.hasArmWideBranch(true))
        CanUseThumbBWJumpTable = true;
  OS = TargetTriple.getOS();
  ObjectFormat = TargetTriple.getObjectFormat();
  auto ReadSummaryFile =
      ExitOnErr(errorOrToExpected(MemoryBuffer::getFile(ClReadSummary)));
  yaml::Input In(ReadSummaryFile->getBuffer());

  LowerTypeTestsModule(

  yaml::Output Out(OS);
  auto *Usr = dyn_cast<CallInst>(U.getUser());
  auto *CB = dyn_cast<CallBase>(Usr);
  if (CB && CB->isCallee(&U))
void LowerTypeTestsModule::replaceCfiUses(Function *Old, Value *New,
                                          bool IsJumpTableCanonical) {
  SmallSetVector<Constant *, 4> Constants;
  for (Use &U : llvm::make_early_inc_range(Old->uses())) {
    // Skip block addresses and no_cfi values, which refer to the function
    // body instead of the jump table.
    if (isa<BlockAddress, NoCFIValue>(U.getUser()))
      continue;

    // Constants cannot be RAUW'd through replaceUsesOfWith because they are
    // uniqued; save them and use handleOperandChange below.
    if (auto *C = dyn_cast<Constant>(U.getUser())) {
      if (!isa<GlobalValue>(C)) {
        Constants.insert(C);
        continue;
      }
    }

    U.set(New);
  }

  for (auto *C : Constants)
    C->handleOperandChange(Old, New);
void LowerTypeTestsModule::replaceDirectCalls(Value *Old, Value *New) {
  Old->replaceUsesWithIf(New, isDirectCall);
}

static void dropTypeTests(Module &M, Function &TypeTestFunc) {
  for (Use &U : llvm::make_early_inc_range(TypeTestFunc.uses())) {
    auto *CI = cast<CallInst>(U.getUser());
    // Find and erase llvm.assume intrinsics for this llvm.type.test call.
    for (Use &CIU : llvm::make_early_inc_range(CI->uses()))
      if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
        Assume->eraseFromParent();
    // If the assume was merged with another one, we might have a use on a
    // phi (which will feed the assume). Simply replace the use on the phi
    // with "true" and leave the merged assume.
    if (!CI->use_empty()) {
      assert(
          all_of(CI->users(), [](User *U) -> bool { return isa<PHINode>(U); }));
      CI->replaceAllUsesWith(ConstantInt::getTrue(M.getContext()));
    }
    CI->eraseFromParent();
  }
}
bool LowerTypeTestsModule::lower() {
  if (DropTypeTests) {
    if (TypeTestFunc)
      dropTypeTests(M, *TypeTestFunc);
    if (PublicTypeTestFunc)
      dropTypeTests(M, *PublicTypeTestFunc);
    if (TypeTestFunc || PublicTypeTestFunc) {
      return true;
    }
    return false;
  }

  if ((!TypeTestFunc || TypeTestFunc->use_empty()) &&
      (!ICallBranchFunnelFunc || ICallBranchFunnelFunc->use_empty()) &&
      !ExportSummary && !ImportSummary)
    return false;

  if (ImportSummary) {
    if (TypeTestFunc)
      for (Use &U : llvm::make_early_inc_range(TypeTestFunc->uses()))
        importTypeTest(cast<CallInst>(U.getUser()));

    if (ICallBranchFunnelFunc && !ICallBranchFunnelFunc->use_empty())
      report_fatal_error(
          "unexpected call to llvm.icall.branch.funnel during import phase");

    SmallVector<Function *, 8> Defs;
    SmallVector<Function *, 8> Decls;
    for (auto &F : M) {
      // CFI functions are either defined or declared in the module.
      if (F.hasLocalLinkage())
        continue;
      if (ImportSummary->cfiFunctionDefs().count(std::string(F.getName())))
        Defs.push_back(&F);
      else if (ImportSummary->cfiFunctionDecls().count(
                   std::string(F.getName())))
        Decls.push_back(&F);
    }

    std::vector<GlobalAlias *> AliasesToErase;
    {
      ScopedSaveAliaseesAndUsed S(M);
      for (auto *F : Defs)
        importFunction(F, /*isJumpTableCanonical*/ true, AliasesToErase);
      for (auto *F : Decls)
        importFunction(F, /*isJumpTableCanonical*/ false, AliasesToErase);
    }
  // Equivalence class set containing type identifiers and the globals that
  // reference them. This is used to partition the set of type identifiers in
  // the module into disjoint sets.
  GlobalClassesTy GlobalClasses;

  // Verify the type metadata and build a record of each type identifier's
  // referencing globals and unique ID.
  struct TIInfo {
    unsigned UniqueId;
    std::vector<GlobalTypeMember *> RefGlobals;
  };
  unsigned CurUniqueId = 0;

  // Cross-DSO CFI emits jumptable entries for exported functions as well as
  // address-taken functions in case they are address taken in other modules.
  const bool CrossDsoCfi = M.getModuleFlag("Cross-DSO CFI") != nullptr;

  struct ExportedFunctionInfo {
    CfiFunctionLinkage Linkage;
    MDNode *FuncMD; // {name, linkage, type[, type...]}
  };
  if (ExportSummary) {
    // A set of all functions that are address taken by a live global object.
    DenseSet<GlobalValue::GUID> AddressTaken;
    for (auto &I : *ExportSummary)
      for (auto &GVS : I.second.SummaryList)
        if (GVS->isLive())
          for (const auto &Ref : GVS->refs())
            AddressTaken.insert(Ref.getGUID());

    NamedMDNode *CfiFunctionsMD = M.getNamedMetadata("cfi.functions");
    if (CfiFunctionsMD) {
      for (auto *FuncMD : CfiFunctionsMD->operands()) {
        assert(FuncMD->getNumOperands() >= 2);
        StringRef FunctionName =
            cast<MDString>(FuncMD->getOperand(0))->getString();
        CfiFunctionLinkage Linkage = static_cast<CfiFunctionLinkage>(
            cast<ConstantAsMetadata>(FuncMD->getOperand(1))
                ->getValue()
                ->getUniqueInteger()
                .getZExtValue());
        if (!ExportSummary->isGUIDLive(GUID))
          continue;
        if (!AddressTaken.count(GUID)) {
          bool Exported = false;
          if (auto VI = ExportSummary->getValueInfo(GUID))
            for (const auto &GVS : VI.getSummaryList())
              if (GVS->isLive() && !GlobalValue::isLocalLinkage(GVS->linkage()))
                Exported = true;
        }
        auto P = ExportedFunctions.insert({FunctionName, {Linkage, FuncMD}});
        if (!P.second && P.first->second.Linkage != CFL_Definition)
          P.first->second = {Linkage, FuncMD};
      }

      for (const auto &P : ExportedFunctions) {
        StringRef FunctionName = P.first;
        CfiFunctionLinkage Linkage = P.second.Linkage;
        MDNode *FuncMD = P.second.FuncMD;
        Function *F = M.getFunction(FunctionName);
        if (F && F->hasLocalLinkage()) {
          // A locally defined function with the same name as a function
          // defined in a ThinLTO module; rename it to move it out of the way.
          F->setName(F->getName() + ".1");
        }
        F = Function::Create(
            FunctionType::get(Type::getVoidTy(M.getContext()), false),
            GlobalVariable::ExternalLinkage,
            M.getDataLayout().getProgramAddressSpace(), FunctionName, &M);
        // If the function is available_externally, remove its definition so
        // that it is handled the same way as a declaration.
        if (F->hasAvailableExternallyLinkage()) {
          F->setLinkage(GlobalValue::ExternalLinkage);
          F->deleteBody();
          F->setComdat(nullptr);
          F->clearMetadata();
        }

        // Replace the declaration's type metadata with the type metadata we
        // found in cfi.functions.
        if (F->isDeclaration()) {
          F->eraseMetadata(LLVMContext::MD_type);
          for (unsigned I = 2; I < FuncMD->getNumOperands(); ++I)
            F->addMetadata(LLVMContext::MD_type,
  for (GlobalObject &GO : M.global_objects()) {
    if (isa<GlobalVariable>(GO) && GO.isDeclarationForLinker())
      continue;

    Types.clear();
    GO.getMetadata(LLVMContext::MD_type, Types);

    bool IsJumpTableCanonical = false;
    bool IsExported = false;
    if (Function *F = dyn_cast<Function>(&GO)) {
      IsJumpTableCanonical = isJumpTableCanonical(F);
      if (ExportedFunctions.count(F->getName())) {
        IsJumpTableCanonical |=
            ExportedFunctions[F->getName()].Linkage == CFL_Definition;
        IsExported = true;
      } else if (!F->hasAddressTaken()) {
        if (!CrossDsoCfi || !IsJumpTableCanonical || F->hasLocalLinkage())
          continue;
      }
    }

    auto *GTM = GlobalTypeMember::create(Alloc, &GO, IsJumpTableCanonical,
                                         IsExported, Types);
    GlobalTypeMembers[&GO] = GTM;
    for (MDNode *Type : Types) {
      verifyTypeMDNode(&GO, Type);
      auto &Info = TypeIdInfo[Type->getOperand(1)];
      Info.UniqueId = ++CurUniqueId;
      Info.RefGlobals.push_back(GTM);
    }
  }
  auto AddTypeIdUse = [&](Metadata *TypeId) -> TypeIdUserInfo & {
    // Add the call site to the list of call sites for this type identifier.
    // TypeIdUsers also tells us whether we have seen this type identifier
    // before; if so, the referenced globals are already in the equivalence
    // class.
    auto Ins = TypeIdUsers.insert({TypeId, {}});
    if (Ins.second) {
      // Add the type identifier to the equivalence class.
      GlobalClassesTy::iterator GCI = GlobalClasses.insert(TypeId);
      GlobalClassesTy::member_iterator CurSet = GlobalClasses.findLeader(GCI);

      // Add the referenced globals to the type identifier's equivalence class.
      for (GlobalTypeMember *GTM : TypeIdInfo[TypeId].RefGlobals)
        CurSet = GlobalClasses.unionSets(
            CurSet, GlobalClasses.findLeader(GlobalClasses.insert(GTM)));
    }

    return Ins.first->second;
  };
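  // AddTypeIdUse also unions the type identifier with every global that
  // carries it into one equivalence class. The leaders of these classes are
  // the disjoint sets processed below: every type id and every global that
  // must be laid out together ends up in the same class, so each set can be
  // lowered independently.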
  if (TypeTestFunc) {
    for (const Use &U : TypeTestFunc->uses()) {
      auto CI = cast<CallInst>(U.getUser());

      // If this type test is only used by llvm.assume instructions, it was
      // used for whole program devirtualization and does not need lowering.
      bool OnlyAssumeUses = !CI->use_empty();
      for (const Use &CIU : CI->uses()) {
        if (isa<AssumeInst>(CIU.getUser()))
          continue;
        OnlyAssumeUses = false;
        break;
      }
      if (OnlyAssumeUses)
        continue;

      auto TypeIdMDVal = dyn_cast<MetadataAsValue>(CI->getArgOperand(1));
      if (!TypeIdMDVal)
        report_fatal_error("Second argument of llvm.type.test must be metadata");
      auto TypeId = TypeIdMDVal->getMetadata();
      AddTypeIdUse(TypeId).CallSites.push_back(CI);
    }
  }
  if (ICallBranchFunnelFunc) {
    for (const Use &U : ICallBranchFunnelFunc->uses()) {
      if (Arch != Triple::x86_64)
        report_fatal_error(
            "llvm.icall.branch.funnel not supported on this target");

      auto CI = cast<CallInst>(U.getUser());

      std::vector<GlobalTypeMember *> Targets;
      if (CI->arg_size() % 2 != 1)
        report_fatal_error("number of arguments should be odd");

      GlobalClassesTy::member_iterator CurSet;
      for (unsigned I = 1; I != CI->arg_size(); I += 2) {
        int64_t Offset;
        auto *Base = dyn_cast<GlobalObject>(GetPointerBaseWithConstantOffset(
            CI->getOperand(I), Offset, M.getDataLayout()));
        if (!Base)
          report_fatal_error(
              "Expected branch funnel operand to be global value");

        GlobalTypeMember *GTM = GlobalTypeMembers[Base];
        Targets.push_back(GTM);
        GlobalClassesTy::member_iterator NewSet =
            GlobalClasses.findLeader(GlobalClasses.insert(GTM));
        if (I == 1)
          CurSet = NewSet;
        else
          CurSet = GlobalClasses.unionSets(CurSet, NewSet);
      }

      GlobalClasses.unionSets(
          CurSet, GlobalClasses.findLeader(
                      GlobalClasses.insert(ICallBranchFunnel::create(
                          Alloc, CI, Targets, ++CurUniqueId))));
    }
  }
  if (ExportSummary) {
    DenseMap<GlobalValue::GUID, TinyPtrVector<Metadata *>> MetadataByGUID;
    for (auto &P : TypeIdInfo) {
      if (auto *TypeId = dyn_cast<MDString>(P.first))
        MetadataByGUID[GlobalValue::getGUID(TypeId->getString())].push_back(
            TypeId);
    }

    for (auto &P : *ExportSummary) {
      for (auto &S : P.second.SummaryList) {
        if (!ExportSummary->isGlobalValueLive(S.get()))
          continue;
        if (auto *FS = dyn_cast<FunctionSummary>(S->getBaseObject()))
          for (GlobalValue::GUID G : FS->type_tests())
            for (Metadata *MD : MetadataByGUID[G])
              AddTypeIdUse(MD).IsExported = true;
      }
    }
  }
  if (GlobalClasses.empty())
    return false;

  // Build a list of disjoint sets ordered by their maximum unique ID, for
  // determinism.
  std::vector<std::pair<GlobalClassesTy::iterator, unsigned>> Sets;
  for (GlobalClassesTy::iterator I = GlobalClasses.begin(),
                                 E = GlobalClasses.end();
       I != E; ++I) {
    if (!I->isLeader())
      continue;
    ++NumTypeIdDisjointSets;

    unsigned MaxUniqueId = 0;
    for (GlobalClassesTy::member_iterator MI = GlobalClasses.member_begin(I);
         MI != GlobalClasses.member_end(); ++MI) {
      if (auto *MD = MI->dyn_cast<Metadata *>())
        MaxUniqueId = std::max(MaxUniqueId, TypeIdInfo[MD].UniqueId);
      else if (auto *BF = MI->dyn_cast<ICallBranchFunnel *>())
        MaxUniqueId = std::max(MaxUniqueId, BF->UniqueId);
    }
    Sets.emplace_back(I, MaxUniqueId);
  }
  // For each disjoint set we found...
  for (const auto &S : Sets) {
    // Build the list of type identifiers in this disjoint set.
    std::vector<Metadata *> TypeIds;
    std::vector<GlobalTypeMember *> Globals;
    std::vector<ICallBranchFunnel *> ICallBranchFunnels;
    for (GlobalClassesTy::member_iterator MI =
             GlobalClasses.member_begin(S.first);
         MI != GlobalClasses.member_end(); ++MI) {
      if (MI->is<Metadata *>())
        TypeIds.push_back(MI->get<Metadata *>());
      else if (MI->is<GlobalTypeMember *>())
        Globals.push_back(MI->get<GlobalTypeMember *>());
      else
        ICallBranchFunnels.push_back(MI->get<ICallBranchFunnel *>());
    }

    // Order type identifiers by unique ID for determinism.
    llvm::sort(TypeIds, [&](Metadata *M1, Metadata *M2) {
      return TypeIdInfo[M1].UniqueId < TypeIdInfo[M2].UniqueId;
    });

    // Same for the branch funnels.
    llvm::sort(ICallBranchFunnels,
               [&](ICallBranchFunnel *F1, ICallBranchFunnel *F2) {
                 return F1->UniqueId < F2->UniqueId;
               });

    // Build bitsets for this disjoint set.
    buildBitSetsFromDisjointSet(TypeIds, Globals, ICallBranchFunnels);
  }
  allocateByteArrays();
  // Parse alias data to replace stand-in function declarations for aliases
  // with an alias to the intended target.
  if (ExportSummary) {
    if (NamedMDNode *AliasesMD = M.getNamedMetadata("aliases")) {
      for (auto *AliasMD : AliasesMD->operands()) {
        assert(AliasMD->getNumOperands() >= 4);
        StringRef AliasName =
            cast<MDString>(AliasMD->getOperand(0))->getString();
        StringRef Aliasee = cast<MDString>(AliasMD->getOperand(1))->getString();

        if (!ExportedFunctions.count(Aliasee) ||
            ExportedFunctions[Aliasee].Linkage != CFL_Definition ||
            !M.getNamedAlias(Aliasee))
          continue;

        bool Visible = static_cast<bool>(
            cast<ConstantAsMetadata>(AliasMD->getOperand(2))
                ->getValue()
                ->getUniqueInteger()
                .getZExtValue());
        bool Weak = static_cast<bool>(
            cast<ConstantAsMetadata>(AliasMD->getOperand(3))
                ->getValue()
                ->getUniqueInteger()
                .getZExtValue());

        if (auto *F = M.getFunction(AliasName)) {
          F->replaceAllUsesWith(Alias);
          F->eraseFromParent();
        }
  // Emit .symver directives for exported functions, if they exist.
  if (ExportSummary) {
    if (NamedMDNode *SymversMD = M.getNamedMetadata("symvers")) {
      for (auto *Symver : SymversMD->operands()) {
        assert(Symver->getNumOperands() >= 2);
        StringRef SymbolName =
            cast<MDString>(Symver->getOperand(0))->getString();
        StringRef Alias = cast<MDString>(Symver->getOperand(1))->getString();

        if (!ExportedFunctions.count(SymbolName))
          continue;

        M.appendModuleInlineAsm(
            (llvm::Twine(".symver ") + SymbolName + ", " + Alias).str());
      }
    }
  }
    Changed = LowerTypeTestsModule::runForTesting(M, AM);
    LowerTypeTestsModule(M, AM, ExportSummary, ImportSummary, DropTypeTests)