using namespace llvm;

#define DEBUG_TYPE "wholeprogramdevirt"

STATISTIC(NumDevirtTargets,
          "Number of whole program devirtualization targets");
STATISTIC(NumSingleImpl, "Number of single implementation devirtualizations");
STATISTIC(NumBranchFunnel, "Number of branch funnels");
STATISTIC(NumUniformRetVal, "Number of uniform return value optimizations");
STATISTIC(NumUniqueRetVal, "Number of unique return value optimizations");
STATISTIC(NumVirtConstProp1Bit,
          "Number of 1 bit virtual constant propagations");
STATISTIC(NumVirtConstProp, "Number of virtual constant propagations");
123 "wholeprogramdevirt-summary-action",
124 cl::desc(
"What to do with the summary when running this pass"),
127 "Import typeid resolutions from summary and globals"),
129 "Export typeid resolutions to summary and globals")),
133 "wholeprogramdevirt-read-summary",
135 "Read summary from given bitcode or YAML file before running pass"),
139 "wholeprogramdevirt-write-summary",
140 cl::desc(
"Write summary to given bitcode or YAML file after running pass. "
141 "Output file format is deduced from extension: *.bc means writing "
142 "bitcode, otherwise YAML"),
148 cl::desc(
"Maximum number of call targets per "
149 "call site to enable branch funnels"));
153 cl::desc(
"Print index-based devirtualization messages"));
161 cl::desc(
"Enable whole program visibility"));
166 "disable-whole-program-visibility",
cl::Hidden,
167 cl::desc(
"Disable whole program visibility (overrides enabling options)"));
172 cl::desc(
"Prevent function(s) from being devirtualized"),
183 cl::desc(
"Type of checking for incorrect devirtualizations"),
187 "Fallback to indirect when incorrect")));
  std::vector<GlobPattern> Patterns;

  template <class T> void init(const T &StringList) {
    for (const auto &S : StringList)
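// Helpers for laying out virtual constant propagation data: scan the bytes
// already in use before or after each target's vtable address point to find
// the lowest free bit or byte offset, then record the chosen allocation on
// the target.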
  std::vector<ArrayRef<uint8_t>> Used;
                                       : Target.TM->Bits->Before.BytesUsed;
                              : MinByte - Target.minBeforeBytes();
    if (VTUsed.size() > Offset)
      Used.push_back(VTUsed.slice(Offset));

    for (unsigned I = 0;; ++I) {
      uint8_t BitsUsed = 0;
      for (auto &&B : Used)
      if (BitsUsed != 0xff)
        return (MinByte + I) * 8 +

    for (unsigned I = 0;; ++I) {
      for (auto &&B : Used) {
        while ((I + Byte) < B.size() && Byte < (Size / 8)) {
      return (MinByte + I) * 8;

    OffsetByte = -(AllocBefore / 8 + 1);
    OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
  OffsetBit = AllocBefore % 8;
  Target.setBeforeBit(AllocBefore);

    OffsetByte = AllocAfter / 8;
    OffsetByte = (AllocAfter + 7) / 8;
  OffsetBit = AllocAfter % 8;
  Target.setAfterBit(AllocAfter);
                         const VTableSlot &RHS) {
    return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;

    return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;

  if (!Summary->isLive())
  if (auto *FS = dyn_cast<FunctionSummary>(Summary.get())) {
    if (!FS->fflags().MustBeUnreachable)
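// A virtual call site found while scanning users of the llvm.type.test or
// llvm.type.checked.load intrinsics.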
struct VirtualCallSite {
  unsigned *NumUnsafeUses = nullptr;

              << NV("Optimization", OptName)
              << ": devirtualized a call to "
              << NV("FunctionName", TargetName));

  void replaceAndErase(
    emitRemark(OptName, TargetName, OREGetter);
    if (auto *II = dyn_cast<InvokeInst>(&CB)) {
      II->getUnwindDest()->removePredecessor(II->getParent());
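// Call site information for one bucket of a vtable slot: the call sites found
// in this module, plus the function summaries from other modules that use the
// slot and therefore force any resolution to be exported.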
struct CallSiteInfo {
  std::vector<VirtualCallSite> CallSites;
  bool AllCallSitesDevirted = true;

  bool SummaryHasTypeTestAssumeUsers = false;
  std::vector<FunctionSummary *> SummaryTypeCheckedLoadUsers;
  std::vector<FunctionSummary *> SummaryTypeTestAssumeUsers;

  bool isExported() const {
    return SummaryHasTypeTestAssumeUsers ||
           !SummaryTypeCheckedLoadUsers.empty();

    SummaryTypeCheckedLoadUsers.push_back(FS);
    AllCallSitesDevirted = false;

    SummaryTypeTestAssumeUsers.push_back(FS);
    SummaryHasTypeTestAssumeUsers = true;
    AllCallSitesDevirted = false;

    AllCallSitesDevirted = true;
    SummaryTypeCheckedLoadUsers.clear();
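// All call sites for a single vtable slot, bucketed by the constant integer
// arguments they pass: CSInfo holds calls with no usable constant arguments,
// ConstCSInfo is keyed by the argument values.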
struct VTableSlotInfo {
  std::map<std::vector<uint64_t>, CallSiteInfo> ConstCSInfo;

  void addCallSite(Value *VTable, CallBase &CB, unsigned *NumUnsafeUses);

  CallSiteInfo &findCallSiteInfo(CallBase &CB);

CallSiteInfo &VTableSlotInfo::findCallSiteInfo(CallBase &CB) {
  std::vector<uint64_t> Args;
  auto *CBType = dyn_cast<IntegerType>(CB.getType());
  if (!CBType || CBType->getBitWidth() > 64 || CB.arg_empty())

    auto *CI = dyn_cast<ConstantInt>(Arg);
    if (!CI || CI->getBitWidth() > 64)
    Args.push_back(CI->getZExtValue());

  return ConstCSInfo[Args];

void VTableSlotInfo::addCallSite(Value *VTable, CallBase &CB,
                                 unsigned *NumUnsafeUses) {
  auto &CSI = findCallSiteInfo(CB);
  CSI.AllCallSitesDevirted = false;
  CSI.CallSites.push_back({VTable, CB, NumUnsafeUses});
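// Module-level pass state: the module being transformed, the summary being
// imported from or exported to, and the virtual call slots discovered by
// scanning type intrinsic users.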
struct DevirtModule {
  std::map<CallInst *, unsigned> NumUnsafeUsesForTypeTest;
  PatternList FunctionsToSkip;

      : M(M), AARGetter(AARGetter), LookupDomTree(LookupDomTree),
        ExportSummary(ExportSummary), ImportSummary(ImportSummary),
        Int8Ty(Type::getInt8Ty(M.getContext())),
        Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
        Int64Ty(Type::getInt64Ty(M.getContext())),
        IntPtrTy(M.getDataLayout().getIntPtrType(M.getContext(), 0)),
        RemarksEnabled(areRemarksEnabled()), OREGetter(OREGetter) {
    assert(!(ExportSummary && ImportSummary));

  bool areRemarksEnabled();

  void scanTypeTestUsers(Function *TypeTestFunc,
  void scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc);

  void buildTypeIdentifierMap(
      std::vector<VTableBits> &Bits,

  tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
                            const std::set<TypeMemberInfo> &TypeMemberInfos,

  void applySingleImplDevirt(VTableSlotInfo &SlotInfo, Constant *TheFn,
                           VTableSlotInfo &SlotInfo,

  void applyICallBranchFunnel(VTableSlotInfo &SlotInfo, Constant *JT,
                            VTableSlotInfo &SlotInfo,

  bool tryEvaluateFunctionsWithArgs(

  void applyUniformRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
                          CallSiteInfo &CSInfo,

  bool shouldExportConstantsAsAbsoluteSymbols();

  void applyUniqueRetValOpt(CallSiteInfo &CSInfo, StringRef FnName, bool IsOne,
  bool tryUniqueRetValOpt(unsigned BitWidth,
                          CallSiteInfo &CSInfo,

  void applyVirtualConstProp(CallSiteInfo &CSInfo, StringRef FnName,
                           VTableSlotInfo &SlotInfo,

  void importResolution(VTableSlot Slot, VTableSlotInfo &SlotInfo);

  void removeRedundantTypeTests();
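// State for index-based whole program devirtualization (DevirtIndex), which
// runs over a combined summary index rather than module IR.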
  std::set<GlobalValue::GUID> &ExportedGUIDs;
  std::map<ValueInfo, std::vector<VTableSlotSummary>> &LocalWPDTargetsMap;
  PatternList FunctionsToSkip;

              std::set<GlobalValue::GUID> &ExportedGUIDs,
              std::map<ValueInfo, std::vector<VTableSlotSummary>> &LocalWPDTargetsMap)
      : ExportSummary(ExportSummary), ExportedGUIDs(ExportedGUIDs),
        LocalWPDTargetsMap(LocalWPDTargetsMap) {

  bool tryFindVirtualCallTargets(std::vector<ValueInfo> &TargetsForSlot,

                           VTableSlotInfo &SlotInfo,
                           std::set<ValueInfo> &DevirtTargets);
struct WholeProgramDevirt : public ModulePass {
  bool UseCommandLine = false;

        ImportSummary(ImportSummary) {

  bool runOnModule(Module &M) override {
    std::unique_ptr<OptimizationRemarkEmitter> ORE;
      ORE = std::make_unique<OptimizationRemarkEmitter>(F);
      return this->getAnalysis<DominatorTreeWrapperPass>(F).getDomTree();

      return DevirtModule::runForTesting(M, LegacyAARGetter(*this), OREGetter,
    return DevirtModule(M, LegacyAARGetter(*this), OREGetter, LookupDomTree,
                        ExportSummary, ImportSummary)

                "Whole program devirtualization", false, false)

char WholeProgramDevirt::ID = 0;

  return new WholeProgramDevirt(ExportSummary, ImportSummary);
  if (UseCommandLine) {
    if (DevirtModule::runForTesting(M, AARGetter, OREGetter, LookupDomTree))
  if (!DevirtModule(M, AARGetter, OREGetter, LookupDomTree, ExportSummary,

    Module &M, bool WholeProgramVisibilityEnabledInLTO,
    if (GV.hasMetadata(LLVMContext::MD_type) &&
        !DynamicExportSymbols.count(GV.getGUID()))

    if (DynamicExportSymbols.count(P.first))
    for (auto &S : P.second.SummaryList) {
      auto *GVar = dyn_cast<GlobalVarSummary>(S.get());

    std::map<ValueInfo, std::vector<VTableSlotSummary>> &LocalWPDTargetsMap) {
  DevirtIndex(Summary, ExportedGUIDs, LocalWPDTargetsMap).run();
    std::map<ValueInfo, std::vector<VTableSlotSummary>> &LocalWPDTargetsMap) {
  for (auto &T : LocalWPDTargetsMap) {
    assert(VI.getSummaryList().size() == 1 &&
           "Devirt of local target has more than one copy");
    auto &S = VI.getSummaryList()[0];
    if (!isExported(S->modulePath(), VI))

    for (auto &SlotSummary : T.second) {
      auto WPDRes = TIdSum->WPDRes.find(SlotSummary.ByteOffset);
      assert(WPDRes != TIdSum->WPDRes.end());
          WPDRes->second.SingleImplName,

         "combined summary should contain Regular LTO module");
bool DevirtModule::runForTesting(
  std::unique_ptr<ModuleSummaryIndex> Summary =
      std::make_unique<ModuleSummaryIndex>(false);

    auto ReadSummaryFile =
    if (Expected<std::unique_ptr<ModuleSummaryIndex>> SummaryOrErr =
      yaml::Input In(ReadSummaryFile->getBuffer());

      DevirtModule(M, AARGetter, OREGetter, LookupDomTree,

    yaml::Output Out(OS);
void DevirtModule::buildTypeIdentifierMap(
    std::vector<VTableBits> &Bits,
  Bits.reserve(M.getGlobalList().size());
    GV.getMetadata(LLVMContext::MD_type, Types);
    if (GV.isDeclaration() || Types.empty())

      Bits.emplace_back();
      Bits.back().GV = &GV;
      Bits.back().ObjectSize =
          M.getDataLayout().getTypeAllocSize(GV.getInitializer()->getType());
      BitsPtr = &Bits.back();

              cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
bool DevirtModule::tryFindVirtualCallTargets(
    std::vector<VirtualCallTarget> &TargetsForSlot,
    const std::set<TypeMemberInfo> &TypeMemberInfos, uint64_t ByteOffset,
    if (!TM.Bits->GV->isConstant())

    if (TM.Bits->GV->getVCallVisibility() ==
                                       TM.Offset + ByteOffset, M);

    if (FunctionsToSkip.match(Fn->getName()))

    if (Fn->getName() == "__cxa_pure_virtual")

    TargetsForSlot.push_back({Fn, &TM});

  return !TargetsForSlot.empty();
bool DevirtIndex::tryFindVirtualCallTargets(
    bool LocalFound = false;
    for (auto &S : P.VTableVI.getSummaryList()) {
      auto *CurVS = cast<GlobalVarSummary>(S->getBaseObject());
      if (!CurVS->vTableFuncs().empty() ||

    for (auto VTP : VS->vTableFuncs()) {
      if (VTP.VTableOffset != P.AddressPointOffset + ByteOffset)

      TargetsForSlot.push_back(VTP.FuncVI);

  return !TargetsForSlot.empty();
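// Rewrite each virtual call site recorded in SlotInfo as a direct call to
// TheFn, the single implementation found for this slot.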
void DevirtModule::applySingleImplDevirt(VTableSlotInfo &SlotInfo,
                                         Constant *TheFn, bool &IsExported) {
  auto Apply = [&](CallSiteInfo &CSInfo) {
    for (auto &&VCallSite : CSInfo.CallSites) {
      if (!OptimizedCalls.insert(&VCallSite.CB).second)

        VCallSite.emitRemark("single-impl",

      auto &CB = VCallSite.CB;

        Builder.SetInsertPoint(ThenTerm);
        auto *CallTrap = Builder.CreateCall(TrapFn);

      NewInst.setMetadata(LLVMContext::MD_prof, nullptr);
      NewInst.setMetadata(LLVMContext::MD_callees, nullptr);

      if (VCallSite.NumUnsafeUses)
        --*VCallSite.NumUnsafeUses;

    if (CSInfo.isExported())
    CSInfo.markDevirt();

  Apply(SlotInfo.CSInfo);
  for (auto &P : SlotInfo.ConstCSInfo)
  if (Callee.getSummaryList().empty())

  bool IsExported = false;
  auto &S = Callee.getSummaryList()[0];

  auto AddCalls = [&](CallSiteInfo &CSInfo) {
    for (auto *FS : CSInfo.SummaryTypeCheckedLoadUsers) {
      IsExported |= S->modulePath() != FS->modulePath();
    for (auto *FS : CSInfo.SummaryTypeTestAssumeUsers) {
      IsExported |= S->modulePath() != FS->modulePath();

  for (auto &P : SlotInfo.ConstCSInfo)
bool DevirtModule::trySingleImplDevirt(
  Function *TheFn = TargetsForSlot[0].Fn;
  for (auto &&Target : TargetsForSlot)

    TargetsForSlot[0].WasDevirt = true;

  bool IsExported = false;
  applySingleImplDevirt(SlotInfo, TheFn, IsExported);

    std::string NewName = (TheFn->getName() + ".llvm.merged").str();
      if (C->getName() == TheFn->getName()) {
        Comdat *NewC = M.getOrInsertComdat(NewName);
          if (GO.getComdat() == C)
                                      VTableSlotInfo &SlotInfo,
                                      std::set<ValueInfo> &DevirtTargets) {
  auto TheFn = TargetsForSlot[0];
  for (auto &&Target : TargetsForSlot)

  auto Size = TheFn.getSummaryList().size();

  if (FunctionsToSkip.match(TheFn.name()))

    for (auto &S : TheFn.getSummaryList())

  DevirtTargets.insert(TheFn);

  auto &S = TheFn.getSummaryList()[0];
  bool IsExported = AddCalls(SlotInfo, TheFn);
    ExportedGUIDs.insert(TheFn.getGUID());

    LocalWPDTargetsMap[TheFn].push_back(SlotSummary);
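// If some call sites for this slot could not be devirtualized, build a branch
// funnel function over the known targets and route those call sites through
// it.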
void DevirtModule::tryICallBranchFunnel(
  bool HasNonDevirt = !SlotInfo.CSInfo.AllCallSitesDevirted;
    for (auto &P : SlotInfo.ConstCSInfo)
      if (!P.second.AllCallSitesDevirted) {
        HasNonDevirt = true;

  if (isa<MDString>(Slot.TypeID)) {
                          M.getDataLayout().getProgramAddressSpace(),
                          getGlobalName(Slot, {}, "branch_funnel"), &M);
                          M.getDataLayout().getProgramAddressSpace(),
                          "branch_funnel", &M);
  JT->addParamAttr(0, Attribute::Nest);

  std::vector<Value *> JTArgs;
  JTArgs.push_back(JT->arg_begin());
  for (auto &T : TargetsForSlot) {
    JTArgs.push_back(getMemberAddr(T.TM));
    JTArgs.push_back(T.Fn);

  bool IsExported = false;
  applyICallBranchFunnel(SlotInfo, JT, IsExported);
void DevirtModule::applyICallBranchFunnel(VTableSlotInfo &SlotInfo,
  auto Apply = [&](CallSiteInfo &CSInfo) {
    if (CSInfo.isExported())
    if (CSInfo.AllCallSitesDevirted)
    for (auto &&VCallSite : CSInfo.CallSites) {
        VCallSite.emitRemark("branch-funnel",
                             JT->stripPointerCasts()->getName(), OREGetter);

      std::vector<Type *> NewArgs;
      NewArgs.push_back(Int8PtrTy);

      std::vector<Value *> Args;
      Args.push_back(IRB.CreateBitCast(VCallSite.VTable, Int8PtrTy));

      if (isa<CallInst>(CB))
        NewCS = IRB.CreateCall(NewFT, IRB.CreateBitCast(JT, NewFTPtr), Args);
        NewCS = IRB.CreateInvoke(NewFT, IRB.CreateBitCast(JT, NewFTPtr),
                                 cast<InvokeInst>(CB).getNormalDest(),
                                 cast<InvokeInst>(CB).getUnwindDest(), Args);

      std::vector<AttributeSet> NewArgAttrs;
                                M.getContext(), Attribute::Nest)}));
      for (unsigned I = 0; I + 2 < Attrs.getNumAttrSets(); ++I)
        NewArgAttrs.push_back(Attrs.getParamAttrs(I));
                                      Attrs.getRetAttrs(), NewArgAttrs));

      if (VCallSite.NumUnsafeUses)
        --*VCallSite.NumUnsafeUses;

  Apply(SlotInfo.CSInfo);
  for (auto &P : SlotInfo.ConstCSInfo)
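// Evaluate each target function on the given constant arguments using the IR
// evaluator, storing the constant integer result in Target.RetVal.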
bool DevirtModule::tryEvaluateFunctionsWithArgs(
    if (Target.Fn->arg_size() != Args.size() + 1)

    for (unsigned I = 0; I != Args.size(); ++I) {
      auto *ArgTy = dyn_cast<IntegerType>(
          Target.Fn->getFunctionType()->getParamType(I + 1));

    if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
        !isa<ConstantInt>(RetVal))
    Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
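// Uniform return value optimization: when every target returns the same
// constant for a given argument list, replace each call with that constant.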
void DevirtModule::applyUniformRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
  for (auto Call : CSInfo.CallSites) {
    Call.replaceAndErase(
        "uniform-ret-val", FnName, RemarksEnabled, OREGetter,
  CSInfo.markDevirt();

bool DevirtModule::tryUniformRetValOpt(
  uint64_t TheRetVal = TargetsForSlot[0].RetVal;
    if (Target.RetVal != TheRetVal)

  if (CSInfo.isExported()) {
    Res->Info = TheRetVal;

  applyUniformRetValOpt(CSInfo, TargetsForSlot[0].Fn->getName(), TheRetVal);
    for (auto &&Target : TargetsForSlot)
std::string DevirtModule::getGlobalName(VTableSlot Slot,
  std::string FullName = "__typeid_";
  OS << cast<MDString>(Slot.TypeID)->getString() << '_' << Slot.ByteOffset;

bool DevirtModule::shouldExportConstantsAsAbsoluteSymbols() {

                      getGlobalName(Slot, Args, Name), C, &M);

  if (shouldExportConstantsAsAbsoluteSymbols()) {

      M.getOrInsertGlobal(getGlobalName(Slot, Args, Name), Int8Arr0Ty);
  auto *GV = dyn_cast<GlobalVariable>(C);

  if (!shouldExportConstantsAsAbsoluteSymbols())

  auto *GV = cast<GlobalVariable>(C->stripPointerCasts());

  if (GV->hasMetadata(LLVMContext::MD_absolute_symbol))

    GV->setMetadata(LLVMContext::MD_absolute_symbol,
    SetAbsRange(~0ull, ~0ull);
    SetAbsRange(0, 1ull << AbsWidth);
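// Unique return value optimization: when exactly one target returns 1 (or 0),
// replace each call with a comparison of the call site's vtable pointer
// against that unique member's address.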
void DevirtModule::applyUniqueRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
  for (auto &&Call : CSInfo.CallSites) {
                     B.CreateBitCast(UniqueMemberAddr, Call.VTable->getType()));
    Cmp = B.CreateZExt(Cmp, Call.CB.getType());
    Call.replaceAndErase("unique-ret-val", FnName, RemarksEnabled, OREGetter,
  CSInfo.markDevirt();

bool DevirtModule::tryUniqueRetValOpt(
  auto tryUniqueRetValOptFor = [&](bool IsOne) {
      if (Target.RetVal == (IsOne ? 1 : 0)) {
        UniqueMember = Target.TM;

    Constant *UniqueMemberAddr = getMemberAddr(UniqueMember);
    if (CSInfo.isExported()) {
      exportGlobal(Slot, Args, "unique_member", UniqueMemberAddr);

    applyUniqueRetValOpt(CSInfo, TargetsForSlot[0].Fn->getName(), IsOne,

      for (auto &&Target : TargetsForSlot)

  if (tryUniqueRetValOptFor(true))
  if (tryUniqueRetValOptFor(false))
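// Rewrite each call site to read its result directly from the vtable: 1-bit
// results become a bit test on the loaded byte, wider results become an
// integer load at the computed byte offset.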
void DevirtModule::applyVirtualConstProp(CallSiteInfo &CSInfo, StringRef FnName,
  for (auto Call : CSInfo.CallSites) {
    auto *RetType = cast<IntegerType>(Call.CB.getType());
        B.CreateGEP(Int8Ty, B.CreateBitCast(Call.VTable, Int8PtrTy), Byte);
    if (RetType->getBitWidth() == 1) {
      NumVirtConstProp1Bit++;
      Call.replaceAndErase("virtual-const-prop-1-bit", FnName, RemarksEnabled,
                           OREGetter, IsBitSet);

      Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
      Value *Val = B.CreateLoad(RetType, ValAddr);
      Call.replaceAndErase("virtual-const-prop", FnName, RemarksEnabled,

  CSInfo.markDevirt();
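// Virtual constant propagation: for each group of call sites with identical
// constant arguments, evaluate the targets; if the results are neither
// uniform nor unique, store each target's return value in vtable padding
// (before or after the vtable, whichever wastes less space) and rewrite the
// calls to load it from there.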
bool DevirtModule::tryVirtualConstProp(
  auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
  unsigned BitWidth = RetType->getBitWidth();

    if (Target.Fn->isDeclaration() ||
        Target.Fn->arg_empty() || !Target.Fn->arg_begin()->use_empty() ||
        Target.Fn->getReturnType() != RetType)

  for (auto &&CSByConstantArg : SlotInfo.ConstCSInfo) {
    if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))

      ResByArg = &Res->ResByArg[CSByConstantArg.first];

    if (tryUniformRetValOpt(TargetsForSlot, CSByConstantArg.second, ResByArg))

    if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second,
                           ResByArg, Slot, CSByConstantArg.first))

    uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
    for (auto &&Target : TargetsForSlot) {
      TotalPaddingBefore += std::max<int64_t>(
          (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
      TotalPaddingAfter += std::max<int64_t>(
          (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);

    if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)

    if (TotalPaddingBefore <= TotalPaddingAfter)

    for (auto &&Target : TargetsForSlot)

    if (CSByConstantArg.second.isExported()) {
      exportConstant(Slot, CSByConstantArg.first, "byte", OffsetByte,
      exportConstant(Slot, CSByConstantArg.first, "bit", 1ULL << OffsetBit,

    applyVirtualConstProp(CSByConstantArg.second,
                          TargetsForSlot[0].Fn->getName(), ByteConst, BitConst);
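// Rebuild each vtable global with the constant propagation bytes placed
// before and after the original initializer, then alias the old symbol to the
// original data inside the new global so existing references keep working.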
  if (B.Before.Bytes.empty() && B.After.Bytes.empty())

                                         B.GV->getAlign(), B.GV->getValueType());
  B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), Alignment));

  for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)

      B.GV->getInitializer(),
  NewGV->setSection(B.GV->getSection());
  NewGV->setComdat(B.GV->getComdat());
  NewGV->setAlignment(B.GV->getAlign());

  NewGV->copyMetadata(B.GV, B.Before.Bytes.size());

      B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
                              NewInit->getType(), NewGV,
                               ConstantInt::get(Int32Ty, 1)}),
  Alias->setVisibility(B.GV->getVisibility());
  Alias->takeName(B.GV);

  B.GV->replaceAllUsesWith(Alias);
  B.GV->eraseFromParent();
bool DevirtModule::areRemarksEnabled() {
  const auto &FL = M.getFunctionList();
    const auto &BBL = Fn.getBasicBlockList();
    return DI.isEnabled();
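// Collect virtual call sites guarded by llvm.type.test / llvm.assume pairs
// into CallSlots, and drop type test assume sequences that cannot be used for
// devirtualization here.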
void DevirtModule::scanTypeTestUsers(
    auto *CI = dyn_cast<CallInst>(U.getUser());

    auto &DT = LookupDomTree(*CI->getFunction());
        cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();

    if (!Assumes.empty()) {
        CallSlots[{TypeId, Call.Offset}].addCallSite(Ptr, Call.CB, nullptr);

    auto RemoveTypeTestAssumes = [&]() {
      for (auto Assume : Assumes)
        Assume->eraseFromParent();
      if (CI->use_empty())
        CI->eraseFromParent();

    if (!TypeIdMap.count(TypeId))
      RemoveTypeTestAssumes();
    else if (ImportSummary && isa<MDString>(TypeId)) {
      RemoveTypeTestAssumes();
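// Lower each llvm.type.checked.load call into an explicit vtable load plus an
// llvm.type.test, and track in NumUnsafeUsesForTypeTest how many call sites
// still rely on each test so the test can be removed once all of them are
// devirtualized.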
void DevirtModule::scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc) {
    auto *CI = dyn_cast<CallInst>(U.getUser());

    Value *Ptr = CI->getArgOperand(0);
    Value *TypeIdValue = CI->getArgOperand(2);
    Metadata *TypeId = cast<MetadataAsValue>(TypeIdValue)->getMetadata();

    bool HasNonCallUses = false;
    auto &DT = LookupDomTree(*CI->getFunction());
                                                  HasNonCallUses, CI, DT);

        (LoadedPtrs.size() == 1 && !HasNonCallUses) ? LoadedPtrs[0] : CI);
      Value *GEP = LoadB.CreateGEP(Int8Ty, Ptr, Offset);
      Value *LoadedValue = LoadB.CreateLoad(Int8PtrTy, GEPPtr);

        LoadedPtr->eraseFromParent();

    IRBuilder<> CallB((Preds.size() == 1 && !HasNonCallUses) ? Preds[0] : CI);
    CallInst *TypeTestCall = CallB.CreateCall(TypeTestFunc, {Ptr, TypeIdValue});

      Pred->eraseFromParent();

    if (!CI->use_empty()) {
      Pair = B.CreateInsertValue(Pair, LoadedValue, {0});
      Pair = B.CreateInsertValue(Pair, TypeTestCall, {1});
      CI->replaceAllUsesWith(Pair);

    auto &NumUnsafeUses = NumUnsafeUsesForTypeTest[TypeTestCall];
    NumUnsafeUses = DevirtCalls.size();

      CallSlots[{TypeId, Call.Offset}].addCallSite(Ptr, Call.CB,

    CI->eraseFromParent();
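// Apply a resolution imported from the summary to this slot's call sites:
// single implementation, uniform or unique return value, virtual constant
// propagation, or branch funnel.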
void DevirtModule::importResolution(VTableSlot Slot, VTableSlotInfo &SlotInfo) {
  auto *TypeId = dyn_cast<MDString>(Slot.TypeID);

  auto ResI = TidSummary->WPDRes.find(Slot.ByteOffset);
  if (ResI == TidSummary->WPDRes.end())

    bool IsExported = false;
    applySingleImplDevirt(SlotInfo, SingleImpl, IsExported);

  for (auto &CSByConstantArg : SlotInfo.ConstCSInfo) {
    auto I = Res.ResByArg.find(CSByConstantArg.first);
    auto &ResByArg = I->second;

      applyUniformRetValOpt(CSByConstantArg.second, "", ResByArg.Info);

          importGlobal(Slot, CSByConstantArg.first, "unique_member");
      applyUniqueRetValOpt(CSByConstantArg.second, "", ResByArg.Info,

      Constant *Byte = importConstant(Slot, CSByConstantArg.first, "byte",
      Constant *Bit = importConstant(Slot, CSByConstantArg.first, "bit", Int8Ty,
      applyVirtualConstProp(CSByConstantArg.second, "", Byte, Bit);

        M.getOrInsertFunction(getGlobalName(Slot, {}, "branch_funnel"),
    bool IsExported = false;
    applyICallBranchFunnel(SlotInfo, JT, IsExported);
void DevirtModule::removeRedundantTypeTests() {
  for (auto &&U : NumUnsafeUsesForTypeTest) {
    if (U.second == 0) {
      U.first->replaceAllUsesWith(True);
      U.first->eraseFromParent();
DevirtModule::lookUpFunctionValueInfo(Function *TheFn,
  assert((ExportSummary != nullptr) &&
         "Caller guarantees ExportSummary is not nullptr");

  const auto TheFnGUID = TheFn->getGUID();
  if ((!TheFnVI) && (TheFnGUID != TheFnGUIDWithExportedName)) {
    TheFnVI = ExportSummary->getValueInfo(TheFnGUIDWithExportedName);

  if (!F->isDeclaration()) {
    return isa<UnreachableInst>(F->getEntryBlock().getTerminator());

  return ExportSummary &&
             DevirtModule::lookUpFunctionValueInfo(F, ExportSummary));
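// Per-module driver: scan the type intrinsics, apply imported resolutions when
// reading a summary, otherwise compute resolutions (single implementation,
// virtual constant propagation, branch funnels) and record them for export.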
  if (!ExportSummary &&
      (!TypeTestFunc || TypeTestFunc->use_empty() || !AssumeFunc ||
      (!TypeCheckedLoadFunc || TypeCheckedLoadFunc->use_empty()))

  std::vector<VTableBits> Bits;
  buildTypeIdentifierMap(Bits, TypeIdMap);

  if (TypeTestFunc && AssumeFunc)
    scanTypeTestUsers(TypeTestFunc, TypeIdMap);

  if (TypeCheckedLoadFunc)
    scanTypeCheckedLoadUsers(TypeCheckedLoadFunc);

  if (ImportSummary) {
    for (auto &S : CallSlots)
      importResolution(S.first, S.second);

    removeRedundantTypeTests();

      GV.eraseMetadata(LLVMContext::MD_vcall_visibility);

  if (TypeIdMap.empty())

  if (ExportSummary) {
    for (auto &P : TypeIdMap) {
      if (auto *TypeId = dyn_cast<MDString>(P.first))
    for (auto &P : *ExportSummary) {
      for (auto &S : P.second.SummaryList) {
        auto *FS = dyn_cast<FunctionSummary>(S.get());

          for (Metadata *MD : MetadataByGUID[VF.GUID]) {
            CallSlots[{MD, VF.Offset}].CSInfo.addSummaryTypeTestAssumeUser(FS);

          for (Metadata *MD : MetadataByGUID[VF.GUID]) {
            CallSlots[{MD, VF.Offset}].CSInfo.addSummaryTypeCheckedLoadUser(FS);

             FS->type_test_assume_const_vcalls()) {
          for (Metadata *MD : MetadataByGUID[VC.VFunc.GUID]) {
            CallSlots[{MD, VC.VFunc.Offset}]
                .ConstCSInfo[VC.Args]
                .addSummaryTypeTestAssumeUser(FS);

             FS->type_checked_load_const_vcalls()) {
          for (Metadata *MD : MetadataByGUID[VC.VFunc.GUID]) {
            CallSlots[{MD, VC.VFunc.Offset}]
                .ConstCSInfo[VC.Args]
                .addSummaryTypeCheckedLoadUser(FS);
  bool DidVirtualConstProp = false;
  std::map<std::string, Function *> DevirtTargets;
  for (auto &S : CallSlots) {
    std::vector<VirtualCallTarget> TargetsForSlot;
    const std::set<TypeMemberInfo> &TypeMemberInfos = TypeIdMap[S.first.TypeID];
    if (ExportSummary && isa<MDString>(S.first.TypeID) &&
        TypeMemberInfos.size())
      Res = &ExportSummary
                 ->getOrInsertTypeIdSummary(
                     cast<MDString>(S.first.TypeID)->getString())
                 .WPDRes[S.first.ByteOffset];
    if (tryFindVirtualCallTargets(TargetsForSlot, TypeMemberInfos,
                                  S.first.ByteOffset, ExportSummary)) {
      if (!trySingleImplDevirt(ExportSummary, TargetsForSlot, S.second, Res)) {
        DidVirtualConstProp |=
            tryVirtualConstProp(TargetsForSlot, S.second, Res, S.first);

        tryICallBranchFunnel(TargetsForSlot, S.second, Res, S.first);

        for (const auto &T : TargetsForSlot)
          DevirtTargets[std::string(T.Fn->getName())] = T.Fn;

    if (ExportSummary && isa<MDString>(S.first.TypeID)) {
      for (auto FS : S.second.CSInfo.SummaryTypeCheckedLoadUsers)
        FS->addTypeTest(GUID);
      for (auto &CCS : S.second.ConstCSInfo)
        for (auto FS : CCS.second.SummaryTypeCheckedLoadUsers)
          FS->addTypeTest(GUID);

  if (RemarksEnabled) {
    for (const auto &DT : DevirtTargets) {
      using namespace ore;
                        << NV("FunctionName", DT.first));

  NumDevirtTargets += DevirtTargets.size();
  removeRedundantTypeTests();

  if (DidVirtualConstProp)

      GV.eraseMetadata(LLVMContext::MD_vcall_visibility);
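// Summary-only devirtualization over the combined index: record which function
// summaries use each call slot, then attempt single implementation
// devirtualization for each slot and report the devirtualized targets.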
  if (ExportSummary.typeIdCompatibleVtableMap().empty())

  for (auto &P : ExportSummary.typeIdCompatibleVtableMap()) {

  for (auto &P : ExportSummary) {
    for (auto &S : P.second.SummaryList) {
      auto *FS = dyn_cast<FunctionSummary>(S.get());

          CallSlots[{Name, VF.Offset}].CSInfo.addSummaryTypeTestAssumeUser(FS);

          CallSlots[{Name, VF.Offset}].CSInfo.addSummaryTypeCheckedLoadUser(FS);

           FS->type_test_assume_const_vcalls()) {
          CallSlots[{Name, VC.VFunc.Offset}]
              .ConstCSInfo[VC.Args]
              .addSummaryTypeTestAssumeUser(FS);

           FS->type_checked_load_const_vcalls()) {
          CallSlots[{Name, VC.VFunc.Offset}]
              .ConstCSInfo[VC.Args]
              .addSummaryTypeCheckedLoadUser(FS);

  std::set<ValueInfo> DevirtTargets;
  for (auto &S : CallSlots) {
    std::vector<ValueInfo> TargetsForSlot;
    auto TidSummary =
        ExportSummary.getTypeIdCompatibleVtableSummary(S.first.TypeID);
          &ExportSummary.getOrInsertTypeIdSummary(S.first.TypeID)
               .WPDRes[S.first.ByteOffset];
    if (tryFindVirtualCallTargets(TargetsForSlot, *TidSummary,
                                  S.first.ByteOffset)) {
      if (!trySingleImplDevirt(TargetsForSlot, S.first, S.second, Res,

    for (const auto &DT : DevirtTargets)
      errs() << "Devirtualized call to " << DT << "\n";

  NumDevirtTargets += DevirtTargets.size();