  81  #include <system_error>
  86  using namespace lowertypetests;
  88  #define DEBUG_TYPE "lowertypetests"
  90  STATISTIC(ByteArraySizeBits, "Byte array size in bits");
  91  STATISTIC(ByteArraySizeBytes, "Byte array size in bytes");
  92  STATISTIC(NumByteArraysCreated, "Number of byte arrays created");
  93  STATISTIC(NumTypeTestCallsLowered, "Number of type test calls lowered");
  94  STATISTIC(NumTypeIdDisjointSets, "Number of disjoint sets of type identifiers");
97 "lowertypetests-avoid-reuse",
98 cl::desc(
"Try to avoid reuse of byte array addresses using aliases"),
102 "lowertypetests-summary-action",
103 cl::desc(
"What to do with the summary when running this pass"),
105 clEnumValN(PassSummaryAction::Import,
"import",
106 "Import typeid resolutions from summary and globals"),
107 clEnumValN(PassSummaryAction::Export,
"export",
108 "Export typeid resolutions to summary and globals")),
112 "lowertypetests-read-summary",
113 cl::desc(
"Read summary from given YAML file before running pass"),
117 "lowertypetests-write-summary",
118 cl::desc(
"Write summary to given YAML file after running pass"),
123 cl::desc(
"Simply drop type test sequences"),
125 "Do not drop any type tests"),
127 "Drop type test assume sequences"),
129 "Drop all type test sequences")),
 143    return Bits.count(BitOffset);
 197    std::vector<uint64_t> &Fragment = Fragments.back();
 200    for (auto ObjIndex : F) {
 202      if (OldFragmentIndex == 0) {
 205        Fragment.push_back(ObjIndex);
 212        std::vector<uint64_t> &OldFragment = Fragments[OldFragmentIndex];
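
The fragments above (lines 197-212) are from GlobalLayoutBuilder::addFragment, which merges any fragments that members of a newly added set already live in, so that every set of type-identifier members stays contiguous in the final layout. Below is a minimal standalone sketch of that greedy merging idea; it is illustrative only (LayoutSketch and its fields are made-up names, not the LLVM class):

    #include <cstdint>
    #include <set>
    #include <vector>

    // Illustrative sketch: keep the members of each added set contiguous by
    // merging the fragments they already belong to (index 0 = not yet placed).
    struct LayoutSketch {
      std::vector<std::vector<uint64_t>> Fragments{{}}; // Fragments[0] stays empty
      std::vector<uint64_t> FragmentMap;                // object index -> fragment index

      void addFragment(const std::set<uint64_t> &F) {
        uint64_t FragmentIndex = Fragments.size();
        Fragments.emplace_back();
        std::vector<uint64_t> &Fragment = Fragments.back();
        for (auto ObjIndex : F) {
          if (ObjIndex >= FragmentMap.size())
            FragmentMap.resize(ObjIndex + 1, 0);
          uint64_t OldFragmentIndex = FragmentMap[ObjIndex];
          if (OldFragmentIndex == 0) {
            // Not laid out yet: append it to the new fragment.
            Fragment.push_back(ObjIndex);
            FragmentMap[ObjIndex] = FragmentIndex;
          } else if (OldFragmentIndex != FragmentIndex) {
            // Already in another fragment: splice that fragment in whole so
            // both sets stay contiguous, then retarget its members.
            std::vector<uint64_t> &OldFragment = Fragments[OldFragmentIndex];
            for (auto O : OldFragment)
              FragmentMap[O] = FragmentIndex;
            Fragment.insert(Fragment.end(), OldFragment.begin(), OldFragment.end());
            OldFragment.clear();
          }
        }
      }
    };
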
 235    unsigned ReqSize = AllocByteOffset + BitSize;
 237    if (Bytes.size() < ReqSize)
 238      Bytes.resize(ReqSize);
 241    AllocMask = 1 << Bit;
 243      Bytes[AllocByteOffset + B] |= AllocMask;
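
Lines 235-243 above are the core of ByteArrayBuilder::allocate: up to eight bit sets share one byte array, each owning one bit position within every byte it covers, and a new set is placed on the least-used bit position. An illustrative standalone sketch of the same allocation logic (not the LLVM class):

    #include <cstdint>
    #include <set>
    #include <vector>

    // Illustrative sketch: eight overlapping bit sets packed into one byte
    // array, each set owning a distinct bit position within its byte range.
    struct ByteArraySketch {
      std::vector<uint8_t> Bytes;
      uint64_t BitAllocs[8] = {};

      void allocate(const std::set<uint64_t> &Bits, uint64_t BitSize,
                    uint64_t &AllocByteOffset, uint8_t &AllocMask) {
        // Pick the bit position with the smallest current allocation.
        unsigned Bit = 0;
        for (unsigned I = 1; I != 8; ++I)
          if (BitAllocs[I] < BitAllocs[Bit])
            Bit = I;

        AllocByteOffset = BitAllocs[Bit];

        // Grow the byte array if needed and record the new high-water mark.
        uint64_t ReqSize = AllocByteOffset + BitSize;
        BitAllocs[Bit] = ReqSize;
        if (Bytes.size() < ReqSize)
          Bytes.resize(ReqSize);

        // Set this set's bits at its bit position.
        AllocMask = uint8_t(1u << Bit);
        for (uint64_t B : Bits)
          Bytes[AllocByteOffset + B] |= AllocMask;
      }
    };
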
 247    if (F->isDeclarationForLinker())
 249    auto *CI = mdconst::extract_or_null<ConstantInt>(
 250        F->getParent()->getModuleFlag("CFI Canonical Jump Tables"));
 251    if (!CI || !CI->isZero())
 253    return F->hasFnAttribute("cfi-canonical-jump-table");
 258  struct ByteArrayInfo {
 259    std::set<uint64_t> Bits;
 271  class GlobalTypeMember final : TrailingObjects<GlobalTypeMember, MDNode *> {
 282    bool IsJumpTableCanonical;
 288    size_t numTrailingObjects(OverloadToken<MDNode *>) const { return NTypes; }
 292                                  bool IsJumpTableCanonical, bool IsExported,
 294    auto *GTM = static_cast<GlobalTypeMember *>(Alloc.Allocate(
 295        totalSizeToAlloc<MDNode *>(Types.size()), alignof(GlobalTypeMember)));
 297    GTM->NTypes = Types.size();
 298    GTM->IsJumpTableCanonical = IsJumpTableCanonical;
 299    GTM->IsExported = IsExported;
 300    std::uninitialized_copy(Types.begin(), Types.end(),
 301                            GTM->getTrailingObjects<MDNode *>());
 310    return IsJumpTableCanonical;
 313    bool isExported() const {
 318    return ArrayRef(getTrailingObjects<MDNode *>(), NTypes);
 322  struct ICallBranchFunnel final
 327    auto *Call = static_cast<ICallBranchFunnel *>(
 328        Alloc.Allocate(totalSizeToAlloc<GlobalTypeMember *>(Targets.size()),
 329                       alignof(ICallBranchFunnel)));
 331    Call->UniqueId = UniqueId;
 333    std::uninitialized_copy(Targets.begin(), Targets.end(),
 334                            Call->getTrailingObjects<GlobalTypeMember *>());
 340    return ArrayRef(getTrailingObjects<GlobalTypeMember *>(), NTargets);
 349  struct ScopedSaveAliaseesAndUsed {
 352    std::vector<std::pair<GlobalAlias *, Function *>> FunctionAliases;
 353    std::vector<std::pair<GlobalIFunc *, Function *>> ResolverIFuncs;
 355    ScopedSaveAliaseesAndUsed(Module &M) : M(M) {
 369      GV->eraseFromParent();
 371      GV->eraseFromParent();
 373      for (auto &GA : M.aliases()) {
 376        if (auto *F = dyn_cast<Function>(GA.getAliasee()->stripPointerCasts()))
 377          FunctionAliases.push_back({&GA, F});
 380      for (auto &GI : M.ifuncs())
 381        if (auto *F = dyn_cast<Function>(GI.getResolver()->stripPointerCasts()))
 382          ResolverIFuncs.push_back({&GI, F});
 385    ~ScopedSaveAliaseesAndUsed() {
 389      for (auto P : FunctionAliases)
 390        P.first->setAliasee(P.second);
 392      for (auto P : ResolverIFuncs) {
 396        P.first->setResolver(P.second);
 401  class LowerTypeTestsModule {
 417    bool CanUseArmJumpTable = false, CanUseThumbBWJumpTable = false;
 420    int HasBranchTargetEnforcement = -1;
 428    PointerType *Int8PtrTy = PointerType::getUnqual(M.getContext());
 431    PointerType *Int32PtrTy = PointerType::getUnqual(M.getContext());
 433    IntegerType *IntPtrTy = M.getDataLayout().getIntPtrType(M.getContext(), 0);
 441    struct TypeIdUserInfo {
 442      std::vector<CallInst *> CallSites;
 443      bool IsExported = false;
 451    struct TypeIdLowering {
 475    std::vector<ByteArrayInfo> ByteArrayInfos;
 477    Function *WeakInitializerFn = nullptr;
 482    bool shouldExportConstantsAsAbsoluteSymbols();
 484    TypeIdLowering importTypeId(StringRef TypeId);
 487                        std::vector<GlobalAlias *> &AliasesToErase);
 492    ByteArrayInfo *createByteArray(BitSetInfo &BSI);
 493    void allocateByteArrays();
 496    void lowerTypeTestCalls(
 500                             const TypeIdLowering &TIL);
 506    bool hasBranchTargetEnforcement();
 507    unsigned getJumpTableEntrySize();
 508    Type *getJumpTableEntryType();
 525                                              bool IsJumpTableCanonical);
 527    void findGlobalVariableUsersOf(Constant *C,
 537    void replaceCfiUses(Function *Old, Value *New, bool IsJumpTableCanonical);
 541    void replaceDirectCalls(Value *Old, Value *New);
 543    bool isFunctionAnnotation(Value *V) const {
 544      return FunctionAnnotations.contains(V);
 570    for (const auto &GlobalAndOffset : GlobalLayout) {
 571      for (MDNode *Type : GlobalAndOffset.first->types()) {
 572        if (Type->getOperand(1) != TypeId)
 576            cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
 589    auto BitsType = cast<IntegerType>(Bits->getType());
 590    unsigned BitWidth = BitsType->getBitWidth();
 592    BitOffset = B.CreateZExtOrTrunc(BitOffset, BitsType);
 594        B.CreateAnd(BitOffset, ConstantInt::get(BitsType, BitWidth - 1));
 595    Value *BitMask = B.CreateShl(ConstantInt::get(BitsType, 1), BitIndex);
 596    Value *MaskedBits = B.CreateAnd(Bits, BitMask);
 597    return B.CreateICmpNE(MaskedBits, ConstantInt::get(BitsType, 0));
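
Lines 589-597 build the masked bit test: the bit offset is reduced modulo the width of the inline bits value, turned into a single-bit mask, and ANDed in. A hedged scalar rendering of the same arithmetic for a 64-bit inline bit vector (illustrative, not LLVM code):

    #include <cstdint>

    // Scalar equivalent of the IR built above: test whether bit
    // (BitOffset mod 64) is set in a 64-bit inline bit vector.
    inline bool maskedBitTest(uint64_t Bits, uint64_t BitOffset) {
      uint64_t BitIndex = BitOffset & 63;           // BitOffset mod the bit width
      uint64_t BitMask = uint64_t(1) << BitIndex;
      return (Bits & BitMask) != 0;
    }
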
 600  ByteArrayInfo *LowerTypeTestsModule::createByteArray(BitSetInfo &BSI) {
 609    ByteArrayInfos.emplace_back();
 610    ByteArrayInfo *BAI = &ByteArrayInfos.back();
 612    BAI->Bits = BSI.Bits;
 614    BAI->ByteArray = ByteArrayGlobal;
 615    BAI->MaskGlobal = MaskGlobal;
 619  void LowerTypeTestsModule::allocateByteArrays() {
 621                     [](const ByteArrayInfo &BAI1, const ByteArrayInfo &BAI2) {
 622                       return BAI1.BitSize > BAI2.BitSize;
 625    std::vector<uint64_t> ByteArrayOffsets(ByteArrayInfos.size());
 628    for (unsigned I = 0; I != ByteArrayInfos.size(); ++I) {
 629      ByteArrayInfo *BAI = &ByteArrayInfos[I];
 632      BAB.allocate(BAI->Bits, BAI->BitSize, ByteArrayOffsets[I], Mask);
 634      BAI->MaskGlobal->replaceAllUsesWith(
 636      BAI->MaskGlobal->eraseFromParent();
 638        *BAI->MaskPtr = Mask;
 646    for (unsigned I = 0; I != ByteArrayInfos.size(); ++I) {
 647      ByteArrayInfo *BAI = &ByteArrayInfos[I];
 649      Constant *Idxs[] = {ConstantInt::get(IntPtrTy, 0),
 650                          ConstantInt::get(IntPtrTy, ByteArrayOffsets[I])};
 652          ByteArrayConst->getType(), ByteArray, Idxs);
 659      BAI->ByteArray->replaceAllUsesWith(Alias);
 660      BAI->ByteArray->eraseFromParent();
 666    ByteArraySizeBytes = BAB.Bytes.size();
 672                                                const TypeIdLowering &TIL,
 679    Constant *ByteArray = TIL.TheByteArray;
 686                                     "bits_use", ByteArray, &M);
 689    Value *ByteAddr = B.CreateGEP(Int8Ty, ByteArray, BitOffset);
 694    return B.CreateICmpNE(ByteAndMask, ConstantInt::get(Int8Ty, 0));
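
For the byte-array representation, the test emitted above loads the byte at the computed bit offset and ANDs it with the per-type-identifier mask. A scalar equivalent, for illustration only:

    #include <cstdint>

    // Scalar equivalent of the byte-array test above: index the shared byte
    // array by the computed bit offset and test this type id's mask bit.
    inline bool byteArrayTest(const uint8_t *ByteArray, uint64_t BitOffset,
                              uint8_t BitMask) {
      return (ByteArray[BitOffset] & BitMask) != 0;
    }
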
 700    if (auto GV = dyn_cast<GlobalObject>(V)) {
 702      GV->getMetadata(LLVMContext::MD_type, Types);
 704        if (Type->getOperand(1) != TypeId)
 708            cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
 716    if (auto GEP = dyn_cast<GEPOperator>(V)) {
 717      APInt APOffset(DL.getIndexSizeInBits(0), 0);
 718      bool Result = GEP->accumulateConstantOffset(DL, APOffset);
 725    if (auto Op = dyn_cast<Operator>(V)) {
 726      if (Op->getOpcode() == Instruction::BitCast)
 729      if (Op->getOpcode() == Instruction::Select)
 740                                                const TypeIdLowering &TIL) {
 756    Value *PtrAsInt = B.CreatePtrToInt(Ptr, IntPtrTy);
 761      return B.CreateICmpEQ(PtrAsInt, OffsetedGlobalAsInt);
 763    Value *PtrOffset = B.CreateSub(PtrAsInt, OffsetedGlobalAsInt);
 774        B.CreateLShr(PtrOffset, B.CreateZExt(TIL.AlignLog2, IntPtrTy));
 775    Value *OffsetSHL = B.CreateShl(
 776        PtrOffset, B.CreateZExt(
 778                ConstantInt::get(Int8Ty, DL.getPointerSizeInBits(0)),
 781    Value *BitOffset = B.CreateOr(OffsetSHR, OffsetSHL);
 783    Value *OffsetInRange = B.CreateICmpULE(BitOffset, TIL.SizeM1);
 787      return OffsetInRange;
 794    if (auto *Br = dyn_cast<BranchInst>(*CI->user_begin()))
 800          Br->getMetadata(LLVMContext::MD_prof));
 804      for (auto &Phi : Else->phis())
 805        Phi.addIncoming(Phi.getIncomingValueForBlock(Then), InitialBB);
 808      return createBitSetTest(ThenB, TIL, BitOffset);
 815    Value *Bit = createBitSetTest(ThenB, TIL, BitOffset);
 820    B.SetInsertPoint(CI);
 822    P->addIncoming(ConstantInt::get(Int1Ty, 0), InitialBB);
 823    P->addIncoming(Bit, ThenB.GetInsertBlock());
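
Lines 756-787 encode the address check itself: the pointer is offset against the combined global, rotated right by the alignment (the lshr combined with a compensating shl), and a single unsigned comparison against SizeM1 then covers both range and alignment, since a misaligned pointer sets high bits. A hedged scalar rendering of that computation (variable names mirror the IR values above; 64-bit pointers assumed):

    #include <cstdint>

    // Illustrative scalar rendering of the address check built above: rotate
    // the byte offset right by AlignLog2 so a misaligned pointer sets high
    // bits, then one unsigned compare covers both alignment and range.
    inline bool offsetInRange(uint64_t Ptr, uint64_t Base, uint64_t AlignLog2,
                              uint64_t SizeM1, uint64_t &BitOffset) {
      uint64_t PtrOffset = Ptr - Base;
      uint64_t OffsetSHR = PtrOffset >> AlignLog2;
      uint64_t OffsetSHL = AlignLog2 ? (PtrOffset << (64 - AlignLog2)) : 0;
      BitOffset = OffsetSHR | OffsetSHL;   // rotate right by AlignLog2
      return BitOffset <= SizeM1;          // in range => candidate for bit test
    }
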
 829  void LowerTypeTestsModule::buildBitSetsFromGlobalVariables(
 836    std::vector<Constant *> GlobalInits;
 842    for (GlobalTypeMember *G : Globals) {
 843      auto *GV = cast<GlobalVariable>(G->getGlobal());
 845          DL.getValueOrABITypeAlignment(GV->getAlign(), GV->getValueType());
 846      MaxAlign = std::max(MaxAlign, Alignment);
 848      GlobalLayout[G] = GVOffset;
 851        GlobalInits.push_back(
 855      GlobalInits.push_back(GV->getInitializer());
 856      uint64_t InitSize = DL.getTypeAllocSize(GV->getValueType());
 857      CurOffset = GVOffset + InitSize;
 866      if (DesiredPadding > 32)
 867        DesiredPadding = alignTo(InitSize, 32) - InitSize;
 871    auto *CombinedGlobal =
 874    CombinedGlobal->setAlignment(MaxAlign);
 877    lowerTypeTestCalls(TypeIds, CombinedGlobal, GlobalLayout);
 882    for (unsigned I = 0; I != Globals.size(); ++I) {
 886      Constant *CombinedGlobalIdxs[] = {ConstantInt::get(Int32Ty, 0),
 887                                        ConstantInt::get(Int32Ty, I * 2)};
 889          NewInit->getType(), CombinedGlobal, CombinedGlobalIdxs);
 893          "", CombinedGlobalElemPtr, &M);
 901  bool LowerTypeTestsModule::shouldExportConstantsAsAbsoluteSymbols() {
 915                                                     const TypeIdLowering &TIL) {
 923          "__typeid_" + TypeId + "_" + Name, C, &M);
 928      if (shouldExportConstantsAsAbsoluteSymbols())
 931        Storage = cast<ConstantInt>(C)->getZExtValue();
 935      ExportGlobal("global_addr", TIL.OffsetedGlobal);
 940      ExportConstant("align", TTRes.AlignLog2, TIL.AlignLog2);
 941      ExportConstant("size_m1", TTRes.SizeM1, TIL.SizeM1);
 943      uint64_t BitSize = cast<ConstantInt>(TIL.SizeM1)->getZExtValue() + 1;
 951      ExportGlobal("byte_array", TIL.TheByteArray);
 952      if (shouldExportConstantsAsAbsoluteSymbols())
 953        ExportGlobal("bit_mask", TIL.BitMask);
 959      ExportConstant("inline_bits", TTRes.InlineBits, TIL.InlineBits);
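
exportTypeId publishes the resolution of a type identifier as symbols named "__typeid_" + TypeId + "_" + Name; the fields visible in this excerpt are global_addr, align, size_m1, byte_array, bit_mask and inline_bits. A small illustrative sketch of the naming scheme, using a hypothetical type id string:

    #include <string>

    // Illustrative only: the exported-symbol naming scheme used above, shown
    // for a hypothetical type id "_ZTS1A" and the "align" field.
    inline std::string exportedSymbolName(const std::string &TypeId,
                                          const std::string &Field) {
      return "__typeid_" + TypeId + "_" + Field;  // e.g. "__typeid__ZTS1A_align"
    }
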
 964  LowerTypeTestsModule::TypeIdLowering
 965  LowerTypeTestsModule::importTypeId(StringRef TypeId) {
 977      Constant *C = M.getOrInsertGlobal(("__typeid_" + TypeId + "_" + Name).str(),
 979      if (auto *GV = dyn_cast<GlobalVariable>(C))
 986      if (!shouldExportConstantsAsAbsoluteSymbols()) {
 988            ConstantInt::get(isa<IntegerType>(Ty) ? Ty : Int64Ty, Const);
 989        if (!isa<IntegerType>(Ty))
 995      auto *GV = cast<GlobalVariable>(C->stripPointerCasts());
 996      if (isa<IntegerType>(Ty))
 998      if (GV->getMetadata(LLVMContext::MD_absolute_symbol))
1008        SetAbsRange(~0ull, ~0ull);
1010        SetAbsRange(0, 1ull << AbsWidth);
1015      TIL.OffsetedGlobal = ImportGlobal("global_addr");
1020      TIL.AlignLog2 = ImportConstant("align", TTRes.AlignLog2, 8, Int8Ty);
1026      TIL.TheByteArray = ImportGlobal("byte_array");
1027      TIL.BitMask = ImportConstant("bit_mask", TTRes.BitMask, 8, Int8PtrTy);
1031      TIL.InlineBits = ImportConstant(
1038  void LowerTypeTestsModule::importTypeTest(CallInst *CI) {
1039    auto TypeIdMDVal = dyn_cast<MetadataAsValue>(CI->getArgOperand(1));
1043    auto TypeIdStr = dyn_cast<MDString>(TypeIdMDVal->getMetadata());
1050    TypeIdLowering TIL = importTypeId(TypeIdStr->getString());
1051    Value *Lowered = lowerTypeTestCall(TypeIdStr, CI, TIL);
1060  void LowerTypeTestsModule::importFunction(
1062      std::vector<GlobalAlias *> &AliasesToErase) {
1063    assert(F->getType()->getAddressSpace() == 0);
1066    std::string Name = std::string(F->getName());
1071    if (F->isDSOLocal()) {
1074                                     F->getAddressSpace(),
1077      replaceDirectCalls(F, RealF);
1087                           F->getAddressSpace(), Name + ".cfi_jt", &M);
1090      F->setName(Name + ".cfi");
1093                           F->getAddressSpace(), Name, &M);
1100      for (auto &U : F->uses()) {
1101        if (auto *A = dyn_cast<GlobalAlias>(U.getUser())) {
1104                                 F->getAddressSpace(), "", &M);
1106          A->replaceAllUsesWith(AliasDecl);
1107          AliasesToErase.push_back(A);
1112    if (F->hasExternalWeakLinkage())
1119      F->setVisibility(Visibility);
1122  void LowerTypeTestsModule::lowerTypeTestCalls(
1128      BitSetInfo BSI = buildBitSet(TypeId, GlobalLayout);
1130        if (auto MDS = dyn_cast<MDString>(TypeId))
1131          dbgs() << MDS->getString() << ": ";
1133          dbgs() << "<unnamed>: ";
1137      ByteArrayInfo *BAI = nullptr;
1140          Int8Ty, CombinedGlobalAddr, ConstantInt::get(IntPtrTy, BSI.ByteOffset)),
1141      TIL.AlignLog2 = ConstantInt::get(Int8Ty, BSI.AlignLog2);
1142      TIL.SizeM1 = ConstantInt::get(IntPtrTy, BSI.BitSize - 1);
1146      } else if (BSI.BitSize <= 64) {
1149        for (auto Bit : BSI.Bits)
1151        if (InlineBits == 0)
1154          TIL.InlineBits = ConstantInt::get(
1155              (BSI.BitSize <= 32) ? Int32Ty : Int64Ty, InlineBits);
1158        ++NumByteArraysCreated;
1159        BAI = createByteArray(BSI);
1160        TIL.TheByteArray = BAI->ByteArray;
1161        TIL.BitMask = BAI->MaskGlobal;
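
This branch picks the lowering strategy for each bit set: small sets (BSI.BitSize <= 64 above) are folded into an inline integer constant, while larger sets go through createByteArray. An illustrative sketch of how such an inline constant is assembled from the set of bit offsets (assuming BitSize <= 64, mirroring the loop over BSI.Bits above):

    #include <cstdint>
    #include <set>

    // Illustrative only: pack a small bit set (all offsets < 64) into the
    // inline integer that the lowered test later checks with a shift-and-mask.
    inline uint64_t buildInlineBits(const std::set<uint64_t> &Bits) {
      uint64_t InlineBits = 0;
      for (uint64_t Bit : Bits)
        InlineBits |= uint64_t(1) << Bit;
      return InlineBits; // a zero result means the bit set is unsatisfiable
    }
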
1164      TypeIdUserInfo &TIUI = TypeIdUsers[TypeId];
1166      if (TIUI.IsExported) {
1167        uint8_t *MaskPtr = exportTypeId(cast<MDString>(TypeId)->getString(), TIL);
1169          BAI->MaskPtr = MaskPtr;
1173      for (CallInst *CI : TIUI.CallSites) {
1174        ++NumTypeTestCallsLowered;
1175        Value *Lowered = lowerTypeTestCall(TypeId, CI, TIL);
1185    if (Type->getNumOperands() != 2)
1190    if (isa<GlobalVariable>(GO) && GO->hasSection())
1192          "A member of a type identifier may not have an explicit section");
1198    auto OffsetConstMD = dyn_cast<ConstantAsMetadata>(Type->getOperand(0));
1201    auto OffsetInt = dyn_cast<ConstantInt>(OffsetConstMD->getValue());
1214  bool LowerTypeTestsModule::hasBranchTargetEnforcement() {
1215    if (HasBranchTargetEnforcement == -1) {
1218      if (const auto *BTE = mdconst::extract_or_null<ConstantInt>(
1219              M.getModuleFlag("branch-target-enforcement")))
1220        HasBranchTargetEnforcement = (BTE->getZExtValue() != 0);
1222        HasBranchTargetEnforcement = 0;
1224    return HasBranchTargetEnforcement;
1227  unsigned LowerTypeTestsModule::getJumpTableEntrySize() {
1228    switch (JumpTableArch) {
1231      if (const auto *MD = mdconst::extract_or_null<ConstantInt>(
1232              M.getModuleFlag("cf-protection-branch")))
1233        if (MD->getZExtValue())
1239      if (CanUseThumbBWJumpTable) {
1240        if (hasBranchTargetEnforcement())
1247      if (hasBranchTargetEnforcement())
1263  void LowerTypeTestsModule::createJumpTableEntry(
1267    unsigned ArgIndex = AsmArgs.size();
1271      if (const auto *MD = mdconst::extract_or_null<ConstantInt>(
1273        Endbr = !MD->isZero();
1275        AsmOS << (JumpTableArch == Triple::x86 ? "endbr32\n" : "endbr64\n");
1276      AsmOS << "jmp ${" << ArgIndex << ":c}@plt\n";
1278        AsmOS << ".balign 16, 0xcc\n";
1280        AsmOS << "int3\nint3\nint3\n";
1282      AsmOS << "b $" << ArgIndex << "\n";
1284      if (hasBranchTargetEnforcement())
1286      AsmOS << "b $" << ArgIndex << "\n";
1288      if (!CanUseThumbBWJumpTable) {
1304        AsmOS << "push {r0,r1}\n"
1306              << "0: add r0, r0, pc\n"
1307              << "str r0, [sp, #4]\n"
1310              << "1: .word $" << ArgIndex << " - (0b + 4)\n";
1312        if (hasBranchTargetEnforcement())
1314        AsmOS << "b.w $" << ArgIndex << "\n";
1318      AsmOS << "tail $" << ArgIndex << "@plt\n";
1320      AsmOS << "pcalau12i $$t0, %pc_hi20($" << ArgIndex << ")\n"
1321            << "jirl $$r0, $$t0, %pc_lo12($" << ArgIndex << ")\n";
1326    ConstraintOS << (ArgIndex > 0 ? ",s" : "s");
1330  Type *LowerTypeTestsModule::getJumpTableEntryType() {
1336  void LowerTypeTestsModule::buildBitSetsFromFunctions(
1342      buildBitSetsFromFunctionsNative(TypeIds, Functions);
1344      buildBitSetsFromFunctionsWASM(TypeIds, Functions);
1349  void LowerTypeTestsModule::moveInitializerToModuleConstructor(
1351    if (WeakInitializerFn == nullptr) {
1356          M.getDataLayout().getProgramAddressSpace(),
1357          "__cfi_global_var_init", &M);
1361      WeakInitializerFn->setSection(
1363              ? "__TEXT,__StaticInit,regular,pure_instructions"
1370    IRBuilder<> IRB(WeakInitializerFn->getEntryBlock().getTerminator());
1376  void LowerTypeTestsModule::findGlobalVariableUsersOf(
1378    for (auto *U : C->users()) {
1379      if (auto *GV = dyn_cast<GlobalVariable>(U))
1381      else if (auto *C2 = dyn_cast<Constant>(U))
1382        findGlobalVariableUsersOf(C2, Out);
1387  void LowerTypeTestsModule::replaceWeakDeclarationWithJumpTablePtr(
1392    findGlobalVariableUsersOf(F, GlobalVarUsers);
1393    for (auto *GV : GlobalVarUsers) {
1394      if (GV == GlobalAnnotation)
1396      moveInitializerToModuleConstructor(GV);
1404                       F->getAddressSpace(), "", &M);
1405    replaceCfiUses(F, PlaceholderFn, IsJumpTableCanonical);
1411      auto *InsertPt = dyn_cast<Instruction>(U.getUser());
1412      assert(InsertPt && "Non-instruction users should have been eliminated");
1413      auto *PN = dyn_cast<PHINode>(InsertPt);
1415        InsertPt = PN->getIncomingBlock(U)->getTerminator();
1424        PN->setIncomingValueForBlock(InsertPt->getParent(), Select);
1432    Attribute TFAttr = F->getFnAttribute("target-features");
1437        if (Feature == "-thumb-mode")
1439        else if (Feature == "+thumb-mode")
1455    if (!CanUseThumbBWJumpTable && CanUseArmJumpTable) {
1463      unsigned ArmCount = 0, ThumbCount = 0;
1464      for (const auto GTM : Functions) {
1465        if (!GTM->isJumpTableCanonical()) {
1472        Function *F = cast<Function>(GTM->getGlobal());
1479  void LowerTypeTestsModule::createJumpTable(
1481    std::string AsmStr, ConstraintStr;
1490    bool areAllEntriesNounwind = true;
1491    for (GlobalTypeMember *GTM : Functions) {
1492      if (!llvm::cast<llvm::Function>(GTM->getGlobal())
1493               ->hasFnAttribute(llvm::Attribute::NoUnwind)) {
1494        areAllEntriesNounwind = false;
1496      createJumpTableEntry(AsmOS, ConstraintOS, JumpTableArch, AsmArgs,
1497                           cast<Function>(GTM->getGlobal()));
1501    F->setAlignment(Align(getJumpTableEntrySize()));
1507      F->addFnAttr(Attribute::Naked);
1509      F->addFnAttr("target-features", "-thumb-mode");
1511      if (hasBranchTargetEnforcement()) {
1514        F->addFnAttr("target-features", "+thumb-mode,+pacbti");
1516        F->addFnAttr("target-features", "+thumb-mode");
1517      if (CanUseThumbBWJumpTable) {
1520        F->addFnAttr("target-cpu", "cortex-a8");
1528      if (F->hasFnAttribute("branch-target-enforcement"))
1529        F->removeFnAttr("branch-target-enforcement");
1530      if (F->hasFnAttribute("sign-return-address"))
1531        F->removeFnAttr("sign-return-address");
1536      F->addFnAttr("target-features", "-c,-relax");
1542      F->addFnAttr(Attribute::NoCfCheck);
1545    if (areAllEntriesNounwind)
1546      F->addFnAttr(Attribute::NoUnwind);
1549    F->addFnAttr(Attribute::NoInline);
1556    for (const auto &Arg : AsmArgs)
1560                                      AsmOS.str(), ConstraintOS.str(),
1563    IRB.CreateCall(JumpTableAsm, AsmArgs);
1564    IRB.CreateUnreachable();
1569  void LowerTypeTestsModule::buildBitSetsFromFunctionsNative(
1651    JumpTableArch = selectJumpTableArmEncoding(Functions);
1655    unsigned EntrySize = getJumpTableEntrySize();
1656    for (unsigned I = 0; I != Functions.size(); ++I)
1657      GlobalLayout[Functions[I]] = I * EntrySize;
1663        M.getDataLayout().getProgramAddressSpace(),
1664        ".cfi.jumptable", &M);
1670    lowerTypeTestCalls(TypeIds, JumpTable, GlobalLayout);
1673      ScopedSaveAliaseesAndUsed S(M);
1677      for (unsigned I = 0; I != Functions.size(); ++I) {
1678        Function *F = cast<Function>(Functions[I]->getGlobal());
1679        bool IsJumpTableCanonical = Functions[I]->isJumpTableCanonical();
1682            JumpTableType, JumpTable,
1684                                 ConstantInt::get(IntPtrTy, I)});
1686        const bool IsExported = Functions[I]->isExported();
1687        if (!IsJumpTableCanonical) {
1692                                        F->getName() + ".cfi_jt",
1693                                        CombinedGlobalElemPtr, &M);
1701          if (IsJumpTableCanonical)
1707        if (!IsJumpTableCanonical) {
1708          if (F->hasExternalWeakLinkage())
1709            replaceWeakDeclarationWithJumpTablePtr(F, CombinedGlobalElemPtr,
1710                                                   IsJumpTableCanonical);
1712            replaceCfiUses(F, CombinedGlobalElemPtr, IsJumpTableCanonical);
1714          assert(F->getType()->getAddressSpace() == 0);
1718                                          CombinedGlobalElemPtr, &M);
1722          F->setName(FAlias->getName() + ".cfi");
1723          replaceCfiUses(F, FAlias, IsJumpTableCanonical);
1724          if (!F->hasLocalLinkage())
1730    createJumpTable(JumpTableFn, Functions);
1739  void LowerTypeTestsModule::buildBitSetsFromFunctionsWASM(
1746    for (GlobalTypeMember *GTM : Functions) {
1747      Function *F = cast<Function>(GTM->getGlobal());
1750      if (!F->hasAddressTaken())
1756          ConstantInt::get(Int64Ty, IndirectIndex))));
1757      F->setMetadata("wasm.index", MD);
1760      GlobalLayout[GTM] = IndirectIndex++;
1769  void LowerTypeTestsModule::buildBitSetsFromDisjointSet(
1773    for (unsigned I = 0; I != TypeIds.size(); ++I)
1774      TypeIdIndices[TypeIds[I]] = I;
1778    std::vector<std::set<uint64_t>> TypeMembers(TypeIds.size());
1779    unsigned GlobalIndex = 0;
1781    for (GlobalTypeMember *GTM : Globals) {
1784        auto I = TypeIdIndices.find(Type->getOperand(1));
1785        if (I != TypeIdIndices.end())
1786          TypeMembers[I->second].insert(GlobalIndex);
1788      GlobalIndices[GTM] = GlobalIndex;
1792    for (ICallBranchFunnel *JT : ICallBranchFunnels) {
1793      TypeMembers.emplace_back();
1794      std::set<uint64_t> &TMSet = TypeMembers.back();
1795      for (GlobalTypeMember *T : JT->targets())
1796        TMSet.insert(GlobalIndices[T]);
1802                  const std::set<uint64_t> &O2) {
1803      return O1.size() < O2.size();
1810    for (auto &&MemSet : TypeMembers)
1811      GLB.addFragment(MemSet);
1815        Globals.empty() || isa<GlobalVariable>(Globals[0]->getGlobal());
1816    std::vector<GlobalTypeMember *> OrderedGTMs(Globals.size());
1817    auto OGTMI = OrderedGTMs.begin();
1818    for (auto &&F : GLB.Fragments) {
1820        if (IsGlobalSet != isa<GlobalVariable>(Globals[Offset]->getGlobal()))
1822              "variables and functions");
1823        *OGTMI++ = Globals[Offset];
1829      buildBitSetsFromGlobalVariables(TypeIds, OrderedGTMs);
1831      buildBitSetsFromFunctions(TypeIds, OrderedGTMs);
1835  LowerTypeTestsModule::LowerTypeTestsModule(
1838      : M(M), ExportSummary(ExportSummary), ImportSummary(ImportSummary),
1841    assert(!(ExportSummary && ImportSummary));
1842    Triple TargetTriple(M.getTargetTriple());
1843    Arch = TargetTriple.getArch();
1845      CanUseArmJumpTable = true;
1852        CanUseArmJumpTable = true;
1854        CanUseThumbBWJumpTable = true;
1857    OS = TargetTriple.getOS();
1858    ObjectFormat = TargetTriple.getObjectFormat();
1862    GlobalAnnotation = M.getGlobalVariable("llvm.global.annotations");
1863    if (GlobalAnnotation && GlobalAnnotation->hasInitializer()) {
1865        cast<ConstantArray>(GlobalAnnotation->getInitializer());
1867        FunctionAnnotations.insert(Op);
1882    yaml::Input In(ReadSummaryFile->getBuffer());
1888        LowerTypeTestsModule(
1902      yaml::Output Out(OS);
1910    auto *Usr = dyn_cast<CallInst>(U.getUser());
1912      auto *CB = dyn_cast<CallBase>(Usr);
1913      if (CB && CB->isCallee(&U))
1919  void LowerTypeTestsModule::replaceCfiUses(Function *Old, Value *New,
1920                                            bool IsJumpTableCanonical) {
1925      if (isa<BlockAddress, NoCFIValue>(U.getUser()))
1933      if (isFunctionAnnotation(U.getUser()))
1938      if (auto *C = dyn_cast<Constant>(U.getUser())) {
1939        if (!isa<GlobalValue>(C)) {
1951    for (auto *C : Constants)
1952      C->handleOperandChange(Old, New);
1955  void LowerTypeTestsModule::replaceDirectCalls(Value *Old, Value *New) {
1960                            bool ShouldDropAll) {
1962      auto *CI = cast<CallInst>(U.getUser());
1965        if (auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
1966          Assume->eraseFromParent();
1975          return isa<PHINode>(U);
1983  bool LowerTypeTestsModule::lower() {
1987    if (DropTypeTests != DropTestKind::None) {
1988      bool ShouldDropAll = DropTypeTests == DropTestKind::All;
1996      if (PublicTypeTestFunc)
1998      if (TypeTestFunc || PublicTypeTestFunc) {
2019    if ((!TypeTestFunc || TypeTestFunc->use_empty()) &&
2020        (!ICallBranchFunnelFunc || ICallBranchFunnelFunc->use_empty()) &&
2021        !ExportSummary && !ImportSummary)
2024    if (ImportSummary) {
2027          importTypeTest(cast<CallInst>(U.getUser()));
2029      if (ICallBranchFunnelFunc && !ICallBranchFunnelFunc->use_empty())
2031            "unexpected call to llvm.icall.branch.funnel during import phase");
2038        if (F.hasLocalLinkage())
2046      std::vector<GlobalAlias *> AliasesToErase;
2048        ScopedSaveAliaseesAndUsed S(M);
2049        for (auto *F : Defs)
2050          importFunction(F, true, AliasesToErase);
2051        for (auto *F : Decls)
2052          importFunction(F, false, AliasesToErase);
2065    GlobalClassesTy GlobalClasses;
2077      std::vector<GlobalTypeMember *> RefGlobals;
2080    unsigned CurUniqueId = 0;
2085    const bool CrossDsoCfi = M.getModuleFlag("Cross-DSO CFI") != nullptr;
2087    struct ExportedFunctionInfo {
2092    if (ExportSummary) {
2093      NamedMDNode *CfiFunctionsMD = M.getNamedMetadata("cfi.functions");
2094      if (CfiFunctionsMD) {
2097        for (auto &I : *ExportSummary)
2098          for (auto &GVS : I.second.SummaryList)
2100            for (const auto &Ref : GVS->refs()) {
2102              for (auto &RefGVS : Ref.getSummaryList())
2103                if (auto Alias = dyn_cast<AliasSummary>(RefGVS.get()))
2104                  AddressTaken.insert(Alias->getAliaseeGUID());
2106        for (auto *FuncMD : CfiFunctionsMD->operands()) {
2107          assert(FuncMD->getNumOperands() >= 2);
2109              cast<MDString>(FuncMD->getOperand(0))->getString();
2111              cast<ConstantAsMetadata>(FuncMD->getOperand(1))
2113                  ->getUniqueInteger()
2119          if (!ExportSummary->isGUIDLive(GUID))
2121          if (!AddressTaken.count(GUID)) {
2126            if (auto VI = ExportSummary->getValueInfo(GUID))
2127              for (const auto &GVS : VI.getSummaryList())
2134          auto P = ExportedFunctions.insert({FunctionName, {Linkage, FuncMD}});
2136            P.first->second = {Linkage, FuncMD};
2139      for (const auto &P : ExportedFunctions) {
2142        MDNode *FuncMD = P.second.FuncMD;
2144        if (F && F->hasLocalLinkage()) {
2151          F->setName(F->getName() + ".1");
2158                                 GlobalVariable::ExternalLinkage,
2159                                 M.getDataLayout().getProgramAddressSpace(), FunctionName, &M);
2166          if (F->hasAvailableExternallyLinkage()) {
2169            F->setComdat(nullptr);
2182        if (F->isDeclaration()) {
2186          F->eraseMetadata(LLVMContext::MD_type);
2188          F->addMetadata(LLVMContext::MD_type,
2203    bool IsJumpTableCanonical = false;
2204    bool IsExported = false;
2205    if (Function *F = dyn_cast<Function>(&GO)) {
2207      if (ExportedFunctions.count(F->getName())) {
2208        IsJumpTableCanonical |=
2215      } else if (!F->hasAddressTaken()) {
2216        if (!CrossDsoCfi || !IsJumpTableCanonical || F->hasLocalLinkage())
2221    auto *GTM = GlobalTypeMember::create(Alloc, &GO, IsJumpTableCanonical,
2223    GlobalTypeMembers[&GO] = GTM;
2225      verifyTypeMDNode(&GO, Type);
2226      auto &Info = TypeIdInfo[Type->getOperand(1)];
2227      Info.UniqueId = ++CurUniqueId;
2228      Info.RefGlobals.push_back(GTM);
2232    auto AddTypeIdUse = [&](Metadata *TypeId) -> TypeIdUserInfo & {
2237      auto Ins = TypeIdUsers.insert({TypeId, {}});
2240        GlobalClassesTy::iterator GCI = GlobalClasses.insert(TypeId);
2241        GlobalClassesTy::member_iterator CurSet = GlobalClasses.findLeader(GCI);
2244        for (GlobalTypeMember *GTM : TypeIdInfo[TypeId].RefGlobals)
2245          CurSet = GlobalClasses.unionSets(
2246              CurSet, GlobalClasses.findLeader(GlobalClasses.insert(GTM)));
2249      return Ins.first->second;
2253    for (const Use &U : TypeTestFunc->uses()) {
2254      auto CI = cast<CallInst>(U.getUser());
2262      for (const Use &CIU : CI->uses()) {
2263        if (isa<AssumeInst>(CIU.getUser()))
2265        OnlyAssumeUses = false;
2271      auto TypeIdMDVal = dyn_cast<MetadataAsValue>(CI->getArgOperand(1));
2274      auto TypeId = TypeIdMDVal->getMetadata();
2275      AddTypeIdUse(TypeId).CallSites.push_back(CI);
2279    if (ICallBranchFunnelFunc) {
2280      for (const Use &U : ICallBranchFunnelFunc->uses()) {
2283              "llvm.icall.branch.funnel not supported on this target");
2285        auto CI = cast<CallInst>(U.getUser());
2287        std::vector<GlobalTypeMember *> Targets;
2291        GlobalClassesTy::member_iterator CurSet;
2292        for (unsigned I = 1; I != CI->arg_size(); I += 2) {
2298                 "Expected branch funnel operand to be global value");
2300          GlobalTypeMember *GTM = GlobalTypeMembers[Base];
2301          Targets.push_back(GTM);
2302          GlobalClassesTy::member_iterator NewSet =
2303              GlobalClasses.findLeader(GlobalClasses.insert(GTM));
2307            CurSet = GlobalClasses.unionSets(CurSet, NewSet);
2310        GlobalClasses.unionSets(
2311            CurSet, GlobalClasses.findLeader(
2312                        GlobalClasses.insert(ICallBranchFunnel::create(
2313                            Alloc, CI, Targets, ++CurUniqueId))));
2317    if (ExportSummary) {
2319      for (auto &P : TypeIdInfo) {
2320        if (auto *TypeId = dyn_cast<MDString>(P.first))
2325      for (auto &P : *ExportSummary) {
2326        for (auto &S : P.second.SummaryList) {
2327          if (!ExportSummary->isGlobalValueLive(S.get()))
2329          if (auto *FS = dyn_cast<FunctionSummary>(S->getBaseObject()))
2332              AddTypeIdUse(MD).IsExported = true;
2337    if (GlobalClasses.empty())
2342    std::vector<std::pair<GlobalClassesTy::iterator, unsigned>> Sets;
2343    for (GlobalClassesTy::iterator I = GlobalClasses.begin(),
2344                                   E = GlobalClasses.end();
2348      ++NumTypeIdDisjointSets;
2350      unsigned MaxUniqueId = 0;
2351      for (GlobalClassesTy::member_iterator MI = GlobalClasses.member_begin(I);
2352           MI != GlobalClasses.member_end(); ++MI) {
2353        if (auto *MD = dyn_cast_if_present<Metadata *>(*MI))
2354          MaxUniqueId = std::max(MaxUniqueId, TypeIdInfo[MD].UniqueId);
2355        else if (auto *BF = dyn_cast_if_present<ICallBranchFunnel *>(*MI))
2356          MaxUniqueId = std::max(MaxUniqueId, BF->UniqueId);
2358      Sets.emplace_back(I, MaxUniqueId);
2363    for (const auto &S : Sets) {
2365      std::vector<Metadata *> TypeIds;
2366      std::vector<GlobalTypeMember *> Globals;
2367      std::vector<ICallBranchFunnel *> ICallBranchFunnels;
2368      for (GlobalClassesTy::member_iterator MI =
2369               GlobalClasses.member_begin(S.first);
2370           MI != GlobalClasses.member_end(); ++MI) {
2371        if (isa<Metadata *>(*MI))
2372          TypeIds.push_back(cast<Metadata *>(*MI));
2373        else if (isa<GlobalTypeMember *>(*MI))
2374          Globals.push_back(cast<GlobalTypeMember *>(*MI));
2376          ICallBranchFunnels.push_back(cast<ICallBranchFunnel *>(*MI));
2382        return TypeIdInfo[M1].UniqueId < TypeIdInfo[M2].UniqueId;
2387                 [&](ICallBranchFunnel *F1, ICallBranchFunnel *F2) {
2388                   return F1->UniqueId < F2->UniqueId;
2392      buildBitSetsFromDisjointSet(TypeIds, Globals, ICallBranchFunnels);
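
The grouping above is driven by an equivalence-class (union-find) structure: every type test and branch funnel unions its type identifiers with the globals that carry them, and each resulting disjoint set is then lowered together by buildBitSetsFromDisjointSet. A minimal union-find sketch of that grouping idea, independent of the LLVM EquivalenceClasses API (all names illustrative):

    #include <cstddef>
    #include <numeric>
    #include <vector>

    // Illustrative union-find: items are indices standing in for type ids,
    // globals and branch funnels; unionSets merges the classes that must be
    // lowered together, findLeader returns the class representative.
    struct DisjointSets {
      std::vector<size_t> Parent;
      explicit DisjointSets(size_t N) : Parent(N) {
        std::iota(Parent.begin(), Parent.end(), 0);
      }
      size_t findLeader(size_t X) {
        while (Parent[X] != X)
          X = Parent[X] = Parent[Parent[X]];  // path halving
        return X;
      }
      void unionSets(size_t A, size_t B) {
        Parent[findLeader(A)] = findLeader(B);
      }
    };
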
2395    allocateByteArrays();
2399    if (ExportSummary) {
2400      if (NamedMDNode *AliasesMD = M.getNamedMetadata("aliases")) {
2401        for (auto *AliasMD : AliasesMD->operands()) {
2402          assert(AliasMD->getNumOperands() >= 4);
2404              cast<MDString>(AliasMD->getOperand(0))->getString();
2405          StringRef Aliasee = cast<MDString>(AliasMD->getOperand(1))->getString();
2407          if (!ExportedFunctions.count(Aliasee) ||
2409              !M.getNamedAlias(Aliasee))
2414              cast<ConstantAsMetadata>(AliasMD->getOperand(2))
2416                  ->getUniqueInteger()
2419              static_cast<bool>(cast<ConstantAsMetadata>(AliasMD->getOperand(3))
2421                                    ->getUniqueInteger()
2429          if (auto *F = M.getFunction(AliasName)) {
2431            F->replaceAllUsesWith(Alias);
2432            F->eraseFromParent();
2441    if (ExportSummary) {
2442      if (NamedMDNode *SymversMD = M.getNamedMetadata("symvers")) {
2443        for (auto *Symver : SymversMD->operands()) {
2444          assert(Symver->getNumOperands() >= 2);
2446              cast<MDString>(Symver->getOperand(0))->getString();
2447          StringRef Alias = cast<MDString>(Symver->getOperand(1))->getString();
2449          if (!ExportedFunctions.count(SymbolName))
2452          M.appendModuleInlineAsm(
2453              (llvm::Twine(".symver ") + SymbolName + ", " + Alias).str());
2465    Changed = LowerTypeTestsModule::runForTesting(M, AM);
2468    LowerTypeTestsModule(M, AM, ExportSummary, ImportSummary, DropTypeTests)