52 cl::desc(
"Convert noalias attributes to metadata during inlining."));
57 cl::desc(
"Convert align attributes to assumptions during inlining."));
60 AAResults *CalleeAAR,
bool InsertLifetime) {
64 AAResults *CalleeAAR,
bool InsertLifetime) {
70 class LandingPadInliningInfo {
79 : OuterResumeDest(II->getUnwindDest()), InnerResumeDest(nullptr),
80 CallerLPad(nullptr), InnerEHValuesPHI(nullptr) {
86 for (; isa<PHINode>(
I); ++
I) {
92 CallerLPad = cast<LandingPadInst>(
I);
98 return OuterResumeDest;
115 void addIncomingPHIValuesFor(
BasicBlock *BB)
const {
116 addIncomingPHIValuesForInto(BB, OuterResumeDest);
121 for (
unsigned i = 0, e = UnwindDestPHIValues.size();
i != e; ++
i, ++
I) {
130 BasicBlock *LandingPadInliningInfo::getInnerResumeDest() {
131 if (InnerResumeDest)
return InnerResumeDest;
136 OuterResumeDest->splitBasicBlock(SplitPoint,
137 OuterResumeDest->getName() +
".body");
140 const unsigned PHICapacity = 2;
143 Instruction *InsertPoint = &InnerResumeDest->front();
145 for (
unsigned i = 0, e = UnwindDestPHIValues.size();
i != e; ++
i, ++
I) {
146 PHINode *OuterPHI = cast<PHINode>(
I);
148 OuterPHI->
getName() +
".lpad-body",
151 InnerPHI->addIncoming(OuterPHI, OuterResumeDest);
156 "eh.lpad-body", InsertPoint);
158 InnerEHValuesPHI->addIncoming(CallerLPad, OuterResumeDest);
161 return InnerResumeDest;
168 void LandingPadInliningInfo::forwardResume(
177 addIncomingPHIValuesForInto(Src, Dest);
179 InnerEHValuesPHI->addIncoming(RI->
getOperand(0), Src);
185 if (
auto *FPI = dyn_cast<FuncletPadInst>(EHPad))
186 return FPI->getParentPad();
198 while (!Worklist.
empty()) {
205 Value *UnwindDestToken =
nullptr;
206 if (
auto *CatchSwitch = dyn_cast<CatchSwitchInst>(CurrentPad)) {
207 if (CatchSwitch->hasUnwindDest()) {
208 UnwindDestToken = CatchSwitch->getUnwindDest()->getFirstNonPHI();
216 for (
auto HI = CatchSwitch->handler_begin(),
217 HE = CatchSwitch->handler_end();
218 HI != HE && !UnwindDestToken; ++
HI) {
220 auto *CatchPad = cast<CatchPadInst>(HandlerBlock->
getFirstNonPHI());
226 if (!isa<CleanupPadInst>(Child) && !isa<CatchSwitchInst>(Child))
230 auto Memo = MemoMap.
find(ChildPad);
231 if (Memo == MemoMap.
end()) {
238 Value *ChildUnwindDestToken = Memo->second;
239 if (!ChildUnwindDestToken)
245 if (isa<ConstantTokenNone>(ChildUnwindDestToken)) {
246 UnwindDestToken = ChildUnwindDestToken;
254 auto *CleanupPad = cast<CleanupPadInst>(CurrentPad);
256 if (
auto *CleanupRet = dyn_cast<CleanupReturnInst>(U)) {
257 if (
BasicBlock *RetUnwindDest = CleanupRet->getUnwindDest())
258 UnwindDestToken = RetUnwindDest->getFirstNonPHI();
263 Value *ChildUnwindDestToken;
264 if (
auto *Invoke = dyn_cast<InvokeInst>(U)) {
265 ChildUnwindDestToken = Invoke->getUnwindDest()->getFirstNonPHI();
266 }
else if (isa<CleanupPadInst>(U) || isa<CatchSwitchInst>(U)) {
268 auto Memo = MemoMap.
find(ChildPad);
269 if (Memo == MemoMap.
end()) {
276 ChildUnwindDestToken = Memo->second;
277 if (!ChildUnwindDestToken)
286 if (isa<Instruction>(ChildUnwindDestToken) &&
289 UnwindDestToken = ChildUnwindDestToken;
295 if (!UnwindDestToken)
303 if (
auto *UnwindPad = dyn_cast<Instruction>(UnwindDestToken))
306 UnwindParent =
nullptr;
307 bool ExitedOriginalPad =
false;
309 ExitedPad && ExitedPad != UnwindParent;
312 if (isa<CatchPadInst>(ExitedPad))
314 MemoMap[ExitedPad] = UnwindDestToken;
315 ExitedOriginalPad |= (ExitedPad == EHPad);
318 if (ExitedOriginalPad)
319 return UnwindDestToken;
350 if (
auto *CPI = dyn_cast<CatchPadInst>(EHPad))
351 EHPad = CPI->getCatchSwitch();
354 auto Memo = MemoMap.
find(EHPad);
355 if (Memo != MemoMap.
end())
360 assert((UnwindDestToken ==
nullptr) != (MemoMap.
count(EHPad) != 0));
362 return UnwindDestToken;
369 MemoMap[EHPad] =
nullptr;
375 Value *AncestorToken;
380 if (isa<CatchPadInst>(AncestorPad))
389 assert(!MemoMap.
count(AncestorPad) || MemoMap[AncestorPad]);
390 auto AncestorMemo = MemoMap.
find(AncestorPad);
391 if (AncestorMemo == MemoMap.
end()) {
394 UnwindDestToken = AncestorMemo->second;
398 LastUselessPad = AncestorPad;
399 MemoMap[LastUselessPad] =
nullptr;
401 TempMemos.
insert(LastUselessPad);
419 while (!Worklist.
empty()) {
421 auto Memo = MemoMap.
find(UselessPad);
422 if (Memo != MemoMap.
end() && Memo->second) {
450 MemoMap[UselessPad] = UnwindDestToken;
451 if (
auto *CatchSwitch = dyn_cast<CatchSwitchInst>(UselessPad)) {
452 assert(CatchSwitch->getUnwindDest() ==
nullptr &&
"Expected useless pad");
453 for (
BasicBlock *HandlerBlock : CatchSwitch->handlers()) {
454 auto *CatchPad = HandlerBlock->getFirstNonPHI();
457 (!isa<InvokeInst>(U) ||
459 cast<InvokeInst>(U)->getUnwindDest()->getFirstNonPHI()) ==
461 "Expected useless pad");
462 if (isa<CatchSwitchInst>(U) || isa<CleanupPadInst>(U))
463 Worklist.
push_back(cast<Instruction>(U));
467 assert(isa<CleanupPadInst>(UselessPad));
469 assert(!isa<CleanupReturnInst>(U) &&
"Expected useless pad");
470 assert((!isa<InvokeInst>(U) ||
472 cast<InvokeInst>(U)->getUnwindDest()->getFirstNonPHI()) ==
474 "Expected useless pad");
475 if (isa<CatchSwitchInst>(U) || isa<CleanupPadInst>(U))
476 Worklist.
push_back(cast<Instruction>(U));
481 return UnwindDestToken;
509 if (
F->getIntrinsicID() == Intrinsic::experimental_deoptimize ||
510 F->getIntrinsicID() == Intrinsic::experimental_guard)
521 auto *FuncletPad = cast<Instruction>(FuncletBundle->Inputs[0]);
522 Value *UnwindDestToken =
524 if (UnwindDestToken && !isa<ConstantTokenNone>(UnwindDestToken))
528 if (
auto *CatchPad = dyn_cast<CatchPadInst>(FuncletPad))
529 MemoKey = CatchPad->getCatchSwitch();
531 MemoKey = FuncletPad;
532 assert(FuncletUnwindMap->count(MemoKey) &&
533 (*FuncletUnwindMap)[MemoKey] == UnwindDestToken &&
534 "must get memoized to avoid confusing later searches");
559 LandingPadInliningInfo Invoke(II);
565 if (
InvokeInst *II = dyn_cast<InvokeInst>(I->getTerminator()))
573 InlinedLPad->reserveClauses(OuterNum);
574 for (
unsigned OuterIdx = 0; OuterIdx != OuterNum; ++OuterIdx)
575 InlinedLPad->addClause(OuterLPad->
getClause(OuterIdx));
577 InlinedLPad->setCleanup(
true);
584 &*BB, Invoke.getOuterResumeDest()))
587 Invoke.addIncomingPHIValuesFor(NewBB);
590 if (
ResumeInst *RI = dyn_cast<ResumeInst>(BB->getTerminator()))
591 Invoke.forwardResume(RI, InlinedLPads);
631 for (
Value *V : UnwindDestPHIValues) {
643 if (
auto *CRI = dyn_cast<CleanupReturnInst>(BB->getTerminator())) {
644 if (CRI->unwindsToCaller()) {
645 auto *CleanupPad = CRI->getCleanupPad();
647 CRI->eraseFromParent();
654 isa<ConstantTokenNone>(FuncletUnwindMap[CleanupPad]));
655 FuncletUnwindMap[CleanupPad] =
665 if (
auto *CatchSwitch = dyn_cast<CatchSwitchInst>(I)) {
666 if (CatchSwitch->unwindsToCaller()) {
667 Value *UnwindDestToken;
668 if (
auto *ParentPad =
669 dyn_cast<Instruction>(CatchSwitch->getParentPad())) {
679 if (UnwindDestToken && !isa<ConstantTokenNone>(UnwindDestToken))
692 CatchSwitch->getParentPad(), UnwindDest,
693 CatchSwitch->getNumHandlers(), CatchSwitch->getName(),
695 for (
BasicBlock *PadBB : CatchSwitch->handlers())
701 FuncletUnwindMap[NewCatchSwitch] = UnwindDestToken;
702 Replacement = NewCatchSwitch;
704 }
else if (!isa<FuncletPadInst>(I)) {
721 &*BB, UnwindDest, &FuncletUnwindMap))
730 UnwindDest->removePredecessor(InvokeBB);
744 VMI != VMIE; ++VMI) {
791 while (!
Queue.empty()) {
792 const MDNode *M = cast<MDNode>(
Queue.pop_back_val());
803 for (
const MDNode *I : MD) {
805 MDMap[
I].reset(DummyNodes.
back().get());
811 for (
const MDNode *I : MD) {
813 for (
unsigned i = 0, ie = I->getNumOperands();
i != ie; ++
i) {
815 if (
const MDNode *M = dyn_cast<MDNode>(V))
818 NewOps.
push_back(const_cast<Metadata *>(V));
822 MDTuple *TempM = cast<MDTuple>(MDMap[
I]);
823 assert(TempM->isTemporary() &&
"Expected temporary node");
825 TempM->replaceAllUsesWith(NewM);
831 VMI != VMIE; ++VMI) {
883 if (Arg.hasNoAliasAttr() && !Arg.use_empty())
886 if (NoAliasArgs.
empty())
892 DT.
recalculate(const_cast<Function&>(*CalledFunc));
905 MDB.createAnonymousAliasScopeDomain(CalledFunc->
getName());
906 for (
unsigned i = 0, e = NoAliasArgs.
size();
i != e; ++
i) {
914 Name +=
": argument ";
921 MDNode *NewScope = MDB.createAnonymousAliasScope(NewDomain, Name);
922 NewScopes.
insert(std::make_pair(A, NewScope));
928 VMI != VMIE; ++VMI) {
929 if (
const Instruction *I = dyn_cast<Instruction>(VMI->first)) {
937 bool IsArgMemOnlyCall =
false, IsFuncCall =
false;
940 if (
const LoadInst *LI = dyn_cast<LoadInst>(I))
941 PtrArgs.
push_back(LI->getPointerOperand());
942 else if (
const StoreInst *
SI = dyn_cast<StoreInst>(I))
944 else if (
const VAArgInst *VAAI = dyn_cast<VAArgInst>(I))
945 PtrArgs.
push_back(VAAI->getPointerOperand());
947 PtrArgs.
push_back(CXI->getPointerOperand());
948 else if (
const AtomicRMWInst *RMWI = dyn_cast<AtomicRMWInst>(I))
949 PtrArgs.
push_back(RMWI->getPointerOperand());
954 if (ICS.doesNotAccessMemory())
962 IsArgMemOnlyCall =
true;
965 for (
Value *Arg : ICS.args()) {
971 if (IsArgMemOnlyCall && !Arg->getType()->isPointerTy())
982 if (PtrArgs.
empty() && !IsFuncCall)
992 for (
const Value *V : PtrArgs) {
995 Objects, DL,
nullptr);
997 for (
Value *O : Objects)
1003 bool CanDeriveViaCapture =
false, UsesAliasingPtr =
false;
1004 for (
const Value *V : ObjSet) {
1008 bool IsNonPtrConst = isa<ConstantInt>(V) || isa<ConstantFP>(V) ||
1009 isa<ConstantPointerNull>(V) ||
1010 isa<ConstantDataVector>(V) || isa<UndefValue>(V);
1017 if (
const Argument *
A = dyn_cast<Argument>(V)) {
1018 if (!
A->hasNoAliasAttr())
1019 UsesAliasingPtr =
true;
1021 UsesAliasingPtr =
true;
1028 if (!isa<Argument>(V) &&
1030 CanDeriveViaCapture =
true;
1035 if (IsFuncCall && !IsArgMemOnlyCall)
1036 CanDeriveViaCapture =
true;
1047 if (!ObjSet.count(
A) && (!CanDeriveViaCapture ||
1060 if (!NoAliases.
empty())
1076 bool CanAddScopes = !UsesAliasingPtr;
1077 if (CanAddScopes && IsFuncCall)
1078 CanAddScopes = IsArgMemOnlyCall;
1082 if (ObjSet.count(
A))
1086 if (!Scopes.
empty())
1107 bool DTCalculated =
false;
1113 unsigned Align = I->getType()->isPointerTy() ? I->getParamAlignment() : 0;
1114 if (Align && !I->hasByValOrInAllocaAttr() && !I->hasNUses(0)) {
1115 if (!DTCalculated) {
1118 DTCalculated =
true;
1128 .CreateAlignmentAssumption(DL, Arg, Align);
1154 if (CalleeNode == CallerNode) {
1155 CallCache.assign(I,
E);
1156 I = CallCache.begin();
1157 E = CallCache.end();
1160 for (; I !=
E; ++
I) {
1161 const Value *OrigCall = I->first;
1165 if (VMI == VMap.
end() || VMI->
second ==
nullptr)
1189 if (!I->second->getFunction())
1208 Type *AggTy = cast<PointerType>(Src->
getType())->getElementType();
1216 Builder.CreateMemCpy(Dst, Src, Size, 1);
1224 unsigned ByValAlignment) {
1237 if (ByValAlignment <= 1)
1261 Align = std::max(Align, ByValAlignment);
1264 &*Caller->
begin()->begin());
1276 switch (II->getIntrinsicID()) {
1278 case Intrinsic::lifetime_start:
1279 case Intrinsic::lifetime_end:
1293 if (Ty == Int8PtrTy)
1298 if (U->getType() != Int8PtrTy)
continue;
1299 if (U->stripPointerCasts() != AI)
continue;
1317 while (
DILocation *IA = CurInlinedAt->getInlinedAt()) {
1333 Ctx, MD->getLine(), MD->getColumn(), MD->getScope(), Last);
1362 Ctx, InlinedAtNode->getLine(), InlinedAtNode->getColumn(),
1363 InlinedAtNode->getScope(), InlinedAtNode->getInlinedAt());
1370 for (; FI != Fn->
end(); ++FI) {
1373 if (
DebugLoc DL = BI->getDebugLoc()) {
1379 if (CalleeHasDebugInfo)
1388 if (
auto *AI = dyn_cast<AllocaInst>(BI))
1392 BI->setDebugLoc(TheCallDL);
1406 AAResults *CalleeAAR,
bool InsertLifetime) {
1409 "Instruction not in function!");
1446 if (CalledFunc->
hasGC()) {
1447 if (!Caller->
hasGC())
1449 else if (CalledFunc->
getGC() != Caller->
getGC())
1466 if (CalledPersonality) {
1467 if (!CallerPersonality)
1473 else if (CalledPersonality != CallerPersonality)
1480 if (CallerPersonality) {
1486 CallSiteEHPad = cast<FuncletPadInst>(ParentFunclet->
Inputs.front());
1490 if (CallSiteEHPad) {
1494 if (isa<CleanupPadInst>(CallSiteEHPad)) {
1497 for (
const BasicBlock &CalledBB : *CalledFunc) {
1498 if (isa<CatchSwitchInst>(CalledBB.getFirstNonPHI()))
1505 for (
const BasicBlock &CalledBB : *CalledFunc) {
1506 if (CalledBB.isEHPad())
1516 bool EHPadForCallUnwindsLocally =
false;
1517 if (CallSiteEHPad && CS.
isCall()) {
1519 Value *CallSiteUnwindDestToken =
1522 EHPadForCallUnwindsLocally =
1523 CallSiteUnwindDestToken &&
1524 !isa<ConstantTokenNone>(CallSiteUnwindDestToken);
1545 "No varargs calls can be inlined!");
1552 E = CalledFunc->
arg_end(); I !=
E; ++
I, ++AI, ++ArgNo) {
1553 Value *ActualArg = *AI;
1562 if (ActualArg != *AI)
1566 VMap[&*
I] = ActualArg;
1579 false, Returns,
".i",
1580 &InlinedFunctionInfo, TheCall);
1583 FirstNewBlock = LastBlock; ++FirstNewBlock;
1586 for (std::pair<Value*, Value*> &
Init : ByValInit)
1588 &*FirstNewBlock, IFI);
1596 Instruction *I = dyn_cast_or_null<Instruction>(VH);
1616 std::vector<Value *> MergedDeoptArgs;
1617 MergedDeoptArgs.reserve(ParentDeopt->Inputs.size() +
1618 ChildOB.Inputs.size());
1620 MergedDeoptArgs.insert(MergedDeoptArgs.end(),
1621 ParentDeopt->Inputs.begin(),
1622 ParentDeopt->Inputs.end());
1623 MergedDeoptArgs.insert(MergedDeoptArgs.end(), ChildOB.Inputs.begin(),
1624 ChildOB.Inputs.end());
1626 OpDefs.
emplace_back(
"deopt", std::move(MergedDeoptArgs));
1630 if (isa<CallInst>(I))
1668 if (
auto *II = dyn_cast<IntrinsicInst>(&I))
1669 if (II->getIntrinsicID() == Intrinsic::assume)
1681 E = FirstNewBlock->end(); I !=
E; ) {
1700 while (isa<AllocaInst>(I) &&
1710 InsertPoint, FirstNewBlock->getInstList(), AI->
getIterator(),
I);
1718 bool InlinedMustTailCalls =
false, InlinedDeoptimizeCalls =
false;
1721 if (
CallInst *CI = dyn_cast<CallInst>(TheCall))
1722 CallSiteTailKind = CI->getTailCallKind();
1732 InlinedDeoptimizeCalls |=
1733 F->getIntrinsicID() == Intrinsic::experimental_deoptimize;
1749 ChildTCK =
std::min(CallSiteTailKind, ChildTCK);
1765 for (
unsigned ai = 0, ae = IFI.
StaticAllocas.size(); ai != ae; ++ai) {
1782 uint64_t AllocaTypeSize = DL.getTypeAllocSize(AllocaType);
1783 uint64_t AllocaArraySize = AIArraySize->getLimitedValue();
1786 if (AllocaArraySize == 0)
1791 if (AllocaArraySize != ~0ULL &&
1792 UINT64_MAX / AllocaArraySize >= AllocaTypeSize) {
1794 AllocaArraySize * AllocaTypeSize);
1798 builder.CreateLifetimeStart(AI, AllocaSize);
1802 if (InlinedMustTailCalls &&
1805 if (InlinedDeoptimizeCalls &&
1808 IRBuilder<>(RI).CreateLifetimeEnd(AI, AllocaSize);
1823 .CreateCall(StackSave, {},
"savedstack");
1834 IRBuilder<>(RI).CreateCall(StackRestore, SavedPtr);
1842 if (
auto *II = dyn_cast<InvokeInst>(TheCall)) {
1845 if (isa<LandingPadInst>(FirstNonPHI)) {
1856 if (CallSiteEHPad) {
1871 if (CalledFn && CalledFn->isIntrinsic() && CS.
doesNotThrow())
1897 if (
auto *CleanupRet = dyn_cast<CleanupReturnInst>(BB->getTerminator()))
1898 if (CleanupRet->unwindsToCaller() && EHPadForCallUnwindsLocally)
1905 if (
auto *CatchSwitch = dyn_cast<CatchSwitchInst>(I)) {
1906 if (isa<ConstantTokenNone>(CatchSwitch->getParentPad()))
1907 CatchSwitch->setParentPad(CallSiteEHPad);
1909 auto *FPI = cast<FuncletPadInst>(
I);
1910 if (isa<ConstantTokenNone>(FPI->getParentPad()))
1911 FPI->setParentPad(CallSiteEHPad);
1916 if (InlinedDeoptimizeCalls) {
1926 Returns.
erase(NewEnd, Returns.
end());
1930 Caller->
getParent(), Intrinsic::experimental_deoptimize,
1957 "Expected at least the deopt operand bundle");
1961 Builder.CreateCall(NewDeoptIntrinsic, CallArgs, OpBundles);
1964 Builder.CreateRetVoid();
1966 Builder.CreateRet(NewDeoptCall);
1978 if (InlinedMustTailCalls) {
1981 bool NeedBitCast = !TheCall->
use_empty() && TheCall->
getType() != NewRetTy;
1988 if (!ReturnedMustTail) {
1997 auto *OldCast = dyn_cast_or_null<BitCastInst>(RI->getReturnValue());
2000 OldCast->eraseFromParent();
2028 if (Returns.
size() == 1 && std::distance(FirstNewBlock, Caller->
end()) == 1) {
2031 FirstNewBlock->getInstList(),
2032 FirstNewBlock->begin(), FirstNewBlock->end());
2038 if (
InvokeInst *II = dyn_cast<InvokeInst>(TheCall)) {
2056 Returns[0]->eraseFromParent();
2069 BranchInst *CreatedBranchToNormalDest =
nullptr;
2070 if (
InvokeInst *II = dyn_cast<InvokeInst>(TheCall)) {
2080 CalledFunc->
getName() +
".exit");
2087 CalledFunc->
getName() +
".exit");
2095 "splitBasicBlock broken!");
2110 if (Returns.
size() > 1) {
2115 &AfterCallBB->
front());
2124 for (
unsigned i = 0, e = Returns.
size();
i != e; ++
i) {
2127 "Ret value not consistent in function!");
2134 for (
unsigned i = 0, e = Returns.
size();
i != e; ++
i) {
2145 if (CreatedBranchToNormalDest)
2147 }
else if (!Returns.
empty()) {
2151 if (TheCall == Returns[0]->getReturnValue())
2158 BasicBlock *ReturnBB = Returns[0]->getParent();
2166 if (CreatedBranchToNormalDest)
2167 CreatedBranchToNormalDest->
setDebugLoc(Returns[0]->getDebugLoc());
2170 Returns[0]->eraseFromParent();
2188 assert(cast<BranchInst>(Br)->isUnconditional() &&
"splitBasicBlock broken!");
2189 BasicBlock *CalleeEntry = cast<BranchInst>(Br)->getSuccessor(0);
Return a value (possibly void), from a function.
const Value * getCalledValue() const
Get a pointer to the function that is invoked by this instruction.
SymbolTableList< Instruction >::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
void push_back(const T &Elt)
A parsed version of the target data layout string in and methods for querying it. ...
bool replaceDbgDeclareForAlloca(AllocaInst *AI, Value *NewAllocaAddress, DIBuilder &Builder, bool Deref, int Offset=0)
Replaces llvm.dbg.declare instruction when the alloca it describes is replaced with a new value...
FunTy * getCaller() const
getCaller - Return the caller function for this call site
void removePredecessor(BasicBlock *Pred, bool DontDeleteUselessPHIs=false)
Notify the BasicBlock that the predecessor Pred is no longer able to reach it.
void addIncoming(Value *V, BasicBlock *BB)
Add an incoming value to the end of the PHI list.
static cl::opt< bool > EnableNoAliasConversion("enable-noalias-to-md-conversion", cl::init(true), cl::Hidden, cl::desc("Convert noalias attributes to metadata during inlining."))
unsigned getOrEnforceKnownAlignment(Value *V, unsigned PrefAlign, const DataLayout &DL, const Instruction *CxtI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr)
Try to ensure that the alignment of V is at least PrefAlign bytes.
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function. ...
void getOperandBundlesAsDefs(SmallVectorImpl< OperandBundleDef > &Defs) const
unsigned getNumOperandBundles() const
LLVM Argument representation.
iterator erase(iterator where)
MDNode * getScope() const
CallGraph * CG
CG - If non-null, InlineFunction will update the callgraph to reflect the changes it makes...
bool onlyReadsMemory() const
Determine if the function does not access or only reads memory.
A Module instance is used to store all the information related to an LLVM module. ...
Constant * getClause(unsigned Idx) const
Get the value of the clause at index Idx.
static MDTuple * getDistinct(LLVMContext &Context, ArrayRef< Metadata * > MDs)
auto remove_if(R &&Range, UnaryPredicate P) -> decltype(std::begin(Range))
Provide wrappers to std::remove_if which take ranges instead of having to pass begin/end explicitly...
an instruction that atomically checks whether a specified value is in a memory location, and, if it is, stores a new value there.
CallInst * getTerminatingMustTailCall()
Returns the call instruction marked 'musttail' prior to the terminating return instruction of this ba...
std::function< AssumptionCache &(Function &)> * GetAssumptionCache
unsigned getNumOperands() const
Return number of MDNode operands.
unsigned getPrefTypeAlignment(Type *Ty) const
Returns the preferred stack/global alignment for the specified type.
This class represents a function call, abstracting a target machine's calling convention.
bool isIntrinsic() const
isIntrinsic - Returns true if the function's name starts with "llvm.".
void setGC(std::string Str)
size_type count(PtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
A cache of .assume calls within a function.
static void AddAlignmentAssumptions(CallSite CS, InlineFunctionInfo &IFI)
If the inlined function has non-byval align arguments, then add .assume-based alignment assumptions t...
Type * getReturnType() const
Returns the type of the ret val.
const Function * getParent() const
Return the enclosing method, or null if none.
const Instruction & front() const
std::vector< WeakVH > OperandBundleCallSites
All cloned call sites that have operand bundles attached are appended to this vector.
An instruction for reading from memory.
static void AddAliasScopeMetadata(CallSite CS, ValueToValueMapTy &VMap, const DataLayout &DL, AAResults *CalleeAAR)
If the inlined function has noalias arguments, then add new alias scopes for each noalias argument...
static IntegerType * getInt64Ty(LLVMContext &C)
an instruction that atomically reads a memory location, combines it with another value, and then stores the result back.
void GetUnderlyingObjects(Value *V, SmallVectorImpl< Value * > &Objects, const DataLayout &DL, LoopInfo *LI=nullptr, unsigned MaxLookup=6)
This method is similar to GetUnderlyingObject except that it can look through phi and select instruct...
Type * getElementType() const
void reserve(size_type N)
unsigned changeToUnreachable(Instruction *I, bool UseLLVMTrap, bool PreserveLCSSA=false)
Insert an unreachable instruction before the specified instruction, making it and the rest of the cod...
InlineFunctionInfo - This class captures the data input to the InlineFunction call, and records the auxiliary results produced by it.
iterator end()
Get an iterator to the end of the SetVector.
A node in the call graph for a module.
CallingConv::ID getCallingConv() const
getCallingConv()/setCallingConv(CC) - These methods get and set the calling convention of this function...
unsigned arg_size() const
StringRef getName() const
Return a constant reference to the value's name.
static void fixupLineNumbers(Function *Fn, Function::iterator FI, Instruction *TheCall, bool CalleeHasDebugInfo)
Update inlined instructions' line numbers to to encode location where these instructions are inlined...
A templated base class for SmallPtrSet which provides the typesafe interface that is common across al...
iterator begin()
Instruction iterator methods.
void setCallingConv(CallingConv::ID CC)
bool InlineFunction(CallInst *C, InlineFunctionInfo &IFI, AAResults *CalleeAAR=nullptr, bool InsertLifetime=true)
InlineFunction - This function inlines the called function into the basic block of the caller...
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
void addCalledFunction(CallSite CS, CallGraphNode *M)
Adds a function to the list of functions called by this one.
std::vector< CallRecord >::iterator iterator
static DebugLoc updateInlinedAtInfo(const DebugLoc &DL, DILocation *InlinedAtNode, LLVMContext &Ctx, DenseMap< const DILocation *, DILocation * > &IANodes)
Rebuild the entire inlined-at chain for this instruction so that the top of the chain now is inlined-...
void CloneAndPruneFunctionInto(Function *NewFunc, const Function *OldFunc, ValueToValueMapTy &VMap, bool ModuleLevelChanges, SmallVectorImpl< ReturnInst * > &Returns, const char *NameSuffix="", ClonedCodeInfo *CodeInfo=nullptr, Instruction *TheCall=nullptr)
CloneAndPruneFunctionInto - This works exactly like CloneFunctionInto, except that it does some simpl...
bool hasGC() const
hasGC/getGC/setGC/clearGC - The name of the garbage collection algorithm to use during code generatio...
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
DILocation * get() const
Get the underlying DILocation.
static void HandleInlinedLandingPad(InvokeInst *II, BasicBlock *FirstNewBlock, ClonedCodeInfo &InlinedCodeInfo)
If we inlined an invoke site, we need to convert calls in the body of the inlined function into invok...
A Use represents the edge between a Value definition and its users.
ValTy * getCalledValue() const
getCalledValue - Return the pointer to function that is being called.
Instruction * getFirstNonPHI()
Returns a pointer to the first instruction in this block that is not a PHINode instruction.
ReturnInst * CreateRet(Value *V)
Create a 'ret <val>' instruction.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
bool isCall() const
isCall - true if a CallInst is enclosed.
Constant * getPersonalityFn() const
Get the personality function associated with this function.
This file contains the simple types necessary to represent the attributes associated with functions a...
static void CloneAliasScopeMetadata(CallSite CS, ValueToValueMapTy &VMap)
When inlining a function that contains noalias scope metadata, this metadata needs to be cloned so th...
bool isMustTailCall() const
The only memory references in this function (if it has any) are non-volatile loads from objects point...
LandingPadInst * getLandingPadInst() const
Get the landingpad instruction from the landing pad block (the unwind destination).
LLVM_NODISCARD bool empty() const
bool doesNotThrow() const
Determine if the call cannot unwind.
void addHandler(BasicBlock *Dest)
Add an entry to the switch instruction...
auto reverse(ContainerTy &&C, typename std::enable_if< has_rbegin< ContainerTy >::value >::type *=nullptr) -> decltype(make_range(C.rbegin(), C.rend()))
This file provides interfaces used to build and manipulate a call graph, which is a very useful tool ...
static void UpdateCallGraphAfterInlining(CallSite CS, Function::iterator FirstNewBlock, ValueToValueMapTy &VMap, InlineFunctionInfo &IFI)
Once we have cloned code over from a callee into the caller, update the specified callgraph to reflec...
Value * CreateBitCast(Value *V, Type *DestTy, const Twine &Name="")
bool insert(const value_type &X)
Insert a new element into the SetVector.
LLVMContext & getContext() const
Return the LLVMContext in which this type was uniqued.
op_iterator arg_begin()
Return the iterator pointing to the beginning of the argument list.
FunctionModRefBehavior
Summary of how a function affects memory in the program.
iterator find(const KeyT &Val)
iterator begin()
Get an iterator to the beginning of the SetVector.
bool empty() const
Determine if the SetVector is empty or not.
static std::string utostr(uint64_t X, bool isNeg=false)
Function * getDeclaration(Module *M, ID id, ArrayRef< Type * > Tys=None)
Create or insert an LLVM Function declaration for an intrinsic, and return it.
unsigned getNumClauses() const
Get the number of clauses for this landing pad.
An instruction for storing to memory.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
static TempMDTuple getTemporary(LLVMContext &Context, ArrayRef< Metadata * > MDs)
Return a temporary node.
void takeName(Value *V)
Transfer the name from V to this value.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree...
static unsigned getKnownAlignment(Value *V, const DataLayout &DL, const Instruction *CxtI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr)
Try to infer an alignment for the specified pointer.
BasicBlock * getNormalDest() const
Maximum length of the test input libFuzzer tries to guess a good value based on the corpus and reports it always prefer smaller inputs during the corpus shuffle When libFuzzer itself reports a bug this exit code will be used If indicates the maximal total time in seconds to run the fuzzer minimizes the provided crash input Use with etc Experimental Use value profile to guide fuzzing Number of simultaneous worker processes to run the jobs If min(jobs, NumberOfCpuCores()/2)\" is used.") FUZZER_FLAG_INT(reload
Class to represent pointers.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
static GCRegistry::Add< CoreCLRGC > E("coreclr","CoreCLR-compatible GC")
static BasicBlock * HandleCallsInBlockInlinedThroughInvoke(BasicBlock *BB, BasicBlock *UnwindEdge, UnwindDestMemoTy *FuncletUnwindMap=nullptr)
When we inline a basic block into an invoke, we have to turn all of the calls that can throw into inv...
bool hasOperandBundles() const
static bool isUsedByLifetimeMarker(Value *V)
void getOperandBundlesAsDefs(SmallVectorImpl< OperandBundleDef > &Defs) const
Return the list of operand bundles attached to this instruction as a vector of OperandBundleDefs.
static cl::opt< bool > PreserveAlignmentAssumptions("preserve-alignment-assumptions-during-inlining", cl::init(true), cl::Hidden, cl::desc("Convert align attributes to assumptions during inlining."))
void setCallingConv(CallingConv::ID CC)
SmallVector< CallSite, 8 > InlinedCallSites
All of the new call sites inlined into the caller.
initializer< Ty > init(const Ty &Val)
The landingpad instruction holds all of the information necessary to generate correct exception handl...
Optional< OperandBundleUse > getOperandBundle(StringRef Name) const
Subclasses of this class are all able to terminate a basic block.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
Constant * stripPointerCasts()
LLVM Basic Block Representation.
The instances of the Type class are immutable: once they are created, they are never changed...
This is an important class for using LLVM in a threaded context.
Conditional or Unconditional Branch instruction.
This is an important base class in LLVM.
PointerType * getType() const
Overload to return most specific pointer type.
Resume the propagation of an exception.
This file contains the declarations for the subclasses of Constant, which represent the different fla...
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
Interval::pred_iterator pred_begin(Interval *I)
pred_begin/pred_end - define methods so that Intervals may be used just like BasicBlocks can with the...
bool hasPersonalityFn() const
Check whether this function has a personality function.
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
EHPersonality classifyEHPersonality(const Value *Pers)
See if the given exception handling personality function is one that we understand.
FunctionModRefBehavior getModRefBehavior(ImmutableCallSite CS)
Return the behavior of the given call site.
SmallVector< WeakVH, 8 > InlinedCalls
InlinedCalls - InlineFunction fills this in with callsites that were inlined from the callee...
void splice(iterator where, iplist_impl &L2)
static Value * getUnwindDestTokenHelper(Instruction *EHPad, UnwindDestMemoTy &MemoMap)
Helper for getUnwindDestToken that does the descendant-ward part of the search.
const InstListType & getInstList() const
Return the underlying instruction list container.
bool isUsedWithInAlloca() const
Return true if this alloca is used as an inalloca argument to a call.
bool isByValArgument(unsigned ArgNo) const
Determine whether this argument is passed by value.
static bool hasLifetimeMarkers(AllocaInst *AI)
The only memory references in this function (if it has any) are non-volatile loads and stores from ob...
Value * getOperand(unsigned i) const
Interval::pred_iterator pred_end(Interval *I)
self_iterator getIterator()
op_iterator arg_end()
Return the iterator pointing to the end of the argument list.
uint32_t getTagID() const
Return the tag of this operand bundle as an integer.
void setTailCallKind(TailCallKind TCK)
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
This class represents the va_arg llvm instruction, which returns an argument of the specified type gi...
iterator erase(const_iterator CI)
LLVMContext & getContext() const
All values hold a context through their type.
static PointerType * getInt8PtrTy(LLVMContext &C, unsigned AS=0)
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
void setMetadata(unsigned KindID, MDNode *Node)
Set the metadata of the specified kind to the specified node.
OperandBundleUse getOperandBundleAt(unsigned Index) const
bool isIdentifiedFunctionLocal(const Value *V)
Return true if V is unambiguously identified at the function-level.
static void UpdatePHINodes(BasicBlock *OrigBB, BasicBlock *NewBB, ArrayRef< BasicBlock * > Preds, BranchInst *BI, bool HasLoopExit)
Update the PHI nodes in OrigBB to include the values coming from NewBB.
static InvokeInst * Create(Value *Func, BasicBlock *IfNormal, BasicBlock *IfException, ArrayRef< Value * > Args, const Twine &NameStr, Instruction *InsertBefore=nullptr)
CallInst * getTerminatingDeoptimizeCall()
Returns the call instruction calling llvm.experimental.deoptimize prior to the terminating return instruc...
iterator_range< T > make_range(T x, T y)
Convenience function for iterating over sub-ranges.
static void HandleByValArgumentInit(Value *Dst, Value *Src, Module *M, BasicBlock *InsertBlock, InlineFunctionInfo &IFI)
const MDOperand & getOperand(unsigned I) const
A SetVector that performs no allocations if smaller than a certain size.
Iterator for intrusive lists based on ilist_node.
const BasicBlockListType & getBasicBlockList() const
BasicBlock * getUnwindDest() const
bool isSwiftError() const
Return true if this alloca is used as a swifterror argument to a call.
This is the shared class of boolean and integer constants.
InstrTy * getInstruction() const
static CatchSwitchInst * Create(Value *ParentPad, BasicBlock *UnwindDest, unsigned NumHandlers, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
Optional< OperandBundleUse > getOperandBundle(StringRef Name) const
Return an operand bundle by name, if present.
bool isFuncletEHPersonality(EHPersonality Pers)
Returns true if this is a personality function that invokes handler funclets (which must return to it...
ValTy * getArgument(unsigned ArgNo) const
static CallInst * Create(Value *Func, ArrayRef< Value * > Args, ArrayRef< OperandBundleDef > Bundles=None, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
Module.h This file contains the declarations for the Module class.
Type * getType() const
All values are typed, get the type of this value.
MDNode * getMetadata(unsigned KindID) const
Get the metadata of given kind attached to this Instruction.
TailCallKind getTailCallKind() const
LLVM_NODISCARD T pop_back_val()
static Value * getParentPad(Value *EHPad)
Helper for getUnwindDestToken/getUnwindDestTokenHelper.
Value * stripPointerCasts()
Strip off pointer casts, all-zero GEPs, and aliases.
static Value * getUnwindDestToken(Instruction *EHPad, UnwindDestMemoTy &MemoMap)
Given an EH pad, find where it unwinds.
static Constant * get(Type *Ty, uint64_t V, bool isSigned=false)
If Ty is a vector type, return a Constant with a splat of the given value.
Function * getCalledFunction() const
Return the function called, or null if this is an indirect function invocation.
static BranchInst * Create(BasicBlock *IfTrue, Instruction *InsertBefore=nullptr)
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
const BasicBlock & getEntryBlock() const
DenseMap< Instruction *, Value * > UnwindDestMemoTy
void setOperand(unsigned i, Value *Val)
size_type count(const KeyT &Val) const
Return 1 if the specified key is in the map, 0 otherwise.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
DISubprogram * getSubprogram() const
Get the attached subprogram.
Value * getIncomingValueForBlock(const BasicBlock *BB) const
iterator_range< user_iterator > users()
bool ContainsCalls
ContainsCalls - This is set to true if the cloned code contains a normal call instruction.
bool isEHPad() const
Return true if the instruction is a variety of EH-block.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
SmallVector< AllocaInst *, 4 > StaticAllocas
StaticAllocas - InlineFunction fills this in with all static allocas that get copied into the caller...
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
The basic data container for the call graph of a Module of IR.
LLVM_ATTRIBUTE_ALWAYS_INLINE iterator end()
void emplace_back(ArgTypes &&...Args)
void registerAssumption(CallInst *CI)
Add an llvm.assume intrinsic to this function's cache.
bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
ImmutableCallSite - establish a view to a call site for examination.
static MDNode * concatenate(MDNode *A, MDNode *B)
Methods for metadata merging.
const std::string & getGC() const
static void PropagateParallelLoopAccessMetadata(CallSite CS, ValueToValueMapTy &VMap)
When inlining a call site that has !llvm.mem.parallel_loop_access metadata, that metadata should be p...
SymbolTableList< BasicBlock >::iterator eraseFromParent()
Unlink 'this' from the containing function and delete it.
TerminatorInst * getTerminator()
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
LLVM_ATTRIBUTE_ALWAYS_INLINE size_type size() const
FunctionType * getFunctionType() const
Returns the FunctionType for me.
static Value * HandleByValArgument(Value *Arg, Instruction *TheCall, const Function *CalledFunc, InlineFunctionInfo &IFI, unsigned ByValAlignment)
When inlining a call site that has a byval argument, we have to make the implicit memcpy explicit by ...
ClonedCodeInfo - This struct can be used to capture information about code being cloned, while it is being cloned.
iterator find(const KeyT &Val)
LLVM_NODISCARD std::enable_if<!is_simple_type< Y >::value, typename cast_retty< X, const Y >::ret_type >::type dyn_cast(const Y &Val)
static ConstantTokenNone * get(LLVMContext &Context)
Return the ConstantTokenNone.
BasicBlock * splitBasicBlock(iterator I, const Twine &BBName="")
Split the basic block into two basic blocks at the specified instruction.
LLVMContext & getContext() const
uint64_t getTypeStoreSize(Type *Ty) const
Returns the maximum number of bytes that may be overwritten by storing the specified type...
bool PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures, bool StoreCaptures, const Instruction *I, DominatorTree *DT, bool IncludeI=false, OrderedBasicBlock *OBB=nullptr)
PointerMayBeCapturedBefore - Return true if this pointer value may be captured by the enclosing funct...
static bool allocaWouldBeStaticInEntry(const AllocaInst *AI)
Return the result of AI->isStaticAlloca() if AI were moved to the entry block.
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
bool ContainsDynamicAllocas
ContainsDynamicAllocas - This is set to true if the cloned code contains a 'dynamic' alloca...
bool isAsynchronousEHPersonality(EHPersonality Pers)
Returns true if this personality function catches asynchronous exceptions.
unsigned getParamAlignment(unsigned i) const
Extract the alignment for a call or parameter (0=unknown).
FunTy * getCalledFunction() const
getCalledFunction - Return the function being called if this is a direct call, otherwise return null ...
Module * getParent()
Get the module that this global value is contained inside of...
LLVM Value Representation.
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
A vector that has set insertion semantics.
void removeCallEdgeFor(CallSite CS)
Removes the edge in the node for the specified call site.
static CleanupReturnInst * Create(Value *CleanupPad, BasicBlock *UnwindBB=nullptr, Instruction *InsertBefore=nullptr)
const Value * getArraySize() const
Get the number of elements allocated.
BasicBlock * changeToInvokeAndSplitBasicBlock(CallInst *CI, BasicBlock *UnwindEdge)
Convert the CallInst to InvokeInst with the specified unwind edge basic block.
bool isCleanup() const
Return 'true' if this landingpad instruction is a cleanup.
void recalculate(FT &F)
recalculate - compute a dominator tree for the given function
std::vector< CallRecord > CalledFunctionsVector
Value * SimplifyInstruction(Instruction *I, const DataLayout &DL, const TargetLibraryInfo *TLI=nullptr, const DominatorTree *DT=nullptr, AssumptionCache *AC=nullptr)
See if we can compute a simplified version of this instruction.
void setPersonalityFn(Constant *Fn)
static void HandleInlinedEHPad(InvokeInst *II, BasicBlock *FirstNewBlock, ClonedCodeInfo &InlinedCodeInfo)
If we inlined an invoke site, we need to convert calls in the body of the inlined function into invok...
Type * getAllocatedType() const
Return the type that is being allocated by the instruction.
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
static GCRegistry::Add< ErlangGC > A("erlang","erlang-compatible garbage collector")
const BasicBlock * getParent() const
bool doesNotThrow() const
Determine if the call cannot unwind.
iterator_range< arg_iterator > args()
A wrapper class for inspecting calls to intrinsic functions.
bool isVoidTy() const
Return true if this is 'void'.
an instruction to allocate memory on the stack