#define DEBUG_TYPE "instcombine"

static cl::opt<unsigned> MaxNumPhis(
    "instcombine-max-num-phis", cl::init(512),
    cl::desc("Maximum number phis to handle in intptr/ptrint folding"));

STATISTIC(NumPHIsOfInsertValues,
          "Number of phi-of-insertvalue turned into insertvalue-of-phis");
STATISTIC(NumPHIsOfExtractValues,
          "Number of phi-of-extractvalue turned into extractvalue-of-phi");
STATISTIC(NumPHICSEs, "Number of PHI's that got CSE'd");

  assert(!isa<CallInst>(Inst));
  auto *I = cast<Instruction>(V);

  while (!Stack.empty()) {
    PHINode *Phi = Stack.pop_back_val();
    if (!Visited.insert(Phi).second)
    if (Visited.size() == 16)
    for (User *Use : Phi->users()) {
      Stack.push_back(PhiUse);
  auto *IntToPtr = dyn_cast<IntToPtrInst>(PN.user_back());

    for (User *U : IIP->users()) {
      if (LoadInst *LoadI = dyn_cast<LoadInst>(U)) {
        Ptr = LoadI->getPointerOperand();
      } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
        Ptr = SI->getPointerOperand();
        Ptr = GI->getPointerOperand();

  if (!HasPointerUse(IntToPtr))

    if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
    if (auto *PI = dyn_cast<PtrToIntInst>(Arg)) {
    Value *ArgIntToPtr = nullptr;
      if (isa<IntToPtrInst>(U) && U->getType() == IntToPtr->getType() &&
          cast<Instruction>(U)->getParent() == BB)) {
    if (isa<PHINode>(Arg)) {
    auto *LoadI = dyn_cast<LoadInst>(Arg);
    if (!LoadI->hasOneUse())

         "Not enough available ptr typed incoming values");
  PHINode *MatchingPtrPHI = nullptr;
  unsigned NumPhis = 0;
  for (PHINode &PtrPHI : BB->phis()) {
    if (&PtrPHI == &PN || PtrPHI.getType() != IntToPtr->getType())
        [&](const auto &BlockAndValue) {
          BasicBlock *BB = std::get<0>(BlockAndValue);
          Value *V = std::get<1>(BlockAndValue);
          return PtrPHI.getIncomingValueForBlock(BB) != V;
    MatchingPtrPHI = &PtrPHI;

  if (MatchingPtrPHI) {
           "Phi's Type does not match with IntToPtr");
        return (V->getType() != IntToPtr->getType()) || isa<IntToPtrInst>(V);
    if (V->getType() == IntToPtr->getType())
    auto *Inst = dyn_cast<Instruction>(V);
    if (Inst->isTerminator())
    auto *BB = Inst->getParent();
    if (isa<PHINode>(Inst) && BB->getFirstInsertionPt() == BB->end())

    auto *IncomingBB = std::get<0>(Incoming);
    auto *IncomingVal = std::get<1>(Incoming);
    if (IncomingVal->getType() == IntToPtr->getType()) {
    LoadInst *LoadI = dyn_cast<LoadInst>(IncomingVal);
    assert((isa<PHINode>(IncomingVal) ||
            IncomingVal->getType()->isPointerTy() ||
           "Can not replace LoadInst with multiple uses");
                                     IncomingVal->getName() + ".ptr");
    if (auto *IncomingI = dyn_cast<Instruction>(IncomingVal)) {
      if (isa<PHINode>(IncomingI))
      assert(InsertPos != BB->end() && "should have checked above");
      auto *InsertBB = &IncomingBB->getParent()->getEntryBlock();
  bool OperandWithRoundTripCast = false;
      OperandWithRoundTripCast = true;
  if (!OperandWithRoundTripCast)
    auto *I = dyn_cast<InsertValueInst>(V);
    if (!I || !I->hasOneUser() || I->getIndices() != FirstIVI->getIndices())

  std::array<PHINode *, 2> NewOperands;
  for (int OpIdx : {0, 1}) {
    auto *&NewOperand = NewOperands[OpIdx];
                                   FirstIVI->getOperand(OpIdx)->getName() + ".pn");
      NewOperand->addIncoming(
          cast<InsertValueInst>(std::get<1>(Incoming))->getOperand(OpIdx),

                               FirstIVI->getIndices(), PN.getName());
  ++NumPHIsOfInsertValues;
    auto *I = dyn_cast<ExtractValueInst>(V);
    if (!I || !I->hasOneUser() || I->getIndices() != FirstEVI->getIndices() ||
        I->getAggregateOperand()->getType() !=
            FirstEVI->getAggregateOperand()->getType())

                   FirstEVI->getAggregateOperand()->getName() + ".pn");
    NewAggregateOperand->addIncoming(
        cast<ExtractValueInst>(std::get<1>(Incoming))->getAggregateOperand(),

                                  FirstEVI->getIndices(), PN.getName());
  ++NumPHIsOfExtractValues;
  assert(isa<BinaryOperator>(FirstInst) || isa<CmpInst>(FirstInst));

    if (!I || I->getOpcode() != Opc || !I->hasOneUser() ||
        I->getOperand(0)->getType() != LHSType ||
        I->getOperand(1)->getType() != RHSType)

    if (CmpInst *CI = dyn_cast<CmpInst>(I))
      if (CI->getPredicate() != cast<CmpInst>(FirstInst)->getPredicate())

    if (I->getOperand(0) != LHSVal) LHSVal = nullptr;
    if (I->getOperand(1) != RHSVal) RHSVal = nullptr;

  if (!LHSVal && !RHSVal)

  PHINode *NewLHS = nullptr, *NewRHS = nullptr;

  if (NewLHS || NewRHS) {
      NewRHS->addIncoming(NewInRHS, InBB);

  if (CmpInst *CIOp = dyn_cast<CmpInst>(FirstInst)) {
  bool AllBasePointersAreAllocas = true;
  bool NeededPhi = false;

    if (!GEP || !GEP->hasOneUser() ||
    NW &= GEP->getNoWrapFlags();

    if (AllBasePointersAreAllocas &&
        (!isa<AllocaInst>(GEP->getOperand(0)) ||
         !GEP->hasAllConstantIndices()))
      AllBasePointersAreAllocas = false;

          isa<ConstantInt>(GEP->getOperand(Op)))
              GEP->getOperand(Op)->getType())
      FixedOperands[Op] = nullptr;

  if (AllBasePointersAreAllocas)

  bool HasAnyPHIs = false;
  for (unsigned I = 0, E = FixedOperands.size(); I != E; ++I) {
    if (FixedOperands[I])
    OperandPhis[I] = NewPN;
    FixedOperands[I] = NewPN;

    for (unsigned Op = 0, E = OperandPhis.size(); Op != E; ++Op)

                                ArrayRef(FixedOperands).slice(1), NW);
  for (++BBI; BBI != E; ++BBI)
    if (BBI->mayWriteToMemory()) {
      if (auto *CB = dyn_cast<CallBase>(BBI))
        if (CB->onlyAccessesInaccessibleMemory())

  if (AllocaInst *AI = dyn_cast<AllocaInst>(L->getOperand(0))) {
    bool IsAddressTaken = false;
    for (User *U : AI->users()) {
      if (isa<LoadInst>(U)) continue;
      if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
        if (SI->getOperand(1) == AI) continue;
      IsAddressTaken = true;

    if (!IsAddressTaken && AI->isStaticAlloca())

  if (AllocaInst *AI = dyn_cast<AllocaInst>(GEP->getOperand(0)))
    if (AI->isStaticAlloca() && GEP->hasAllConstantIndices())
      FirstLI->getParent()->getTerminator()->getNumSuccessors() != 1)

    LoadInst *LI = dyn_cast<LoadInst>(InVal);
    LoadAlignment = std::min(LoadAlignment, LI->getAlign());

    if (IsVolatile && LI->getParent()->getTerminator()->getNumSuccessors() != 1)

      new LoadInst(FirstLI->getType(), NewPN, "", IsVolatile, LoadAlignment);

    if (NewInVal != InVal)

      cast<LoadInst>(IncValue)->setVolatile(false);
  if (Instruction *TI = Phi.getParent()->getTerminator())

  unsigned NumIncomingValues = Phi.getNumIncomingValues();
  if (NumIncomingValues < 3)

  Type *NarrowType = nullptr;
  for (Value *V : Phi.incoming_values()) {
    if (auto *Zext = dyn_cast<ZExtInst>(V)) {
      NarrowType = Zext->getSrcTy();

  unsigned NumZexts = 0;
  unsigned NumConsts = 0;
  for (Value *V : Phi.incoming_values()) {
    if (auto *Zext = dyn_cast<ZExtInst>(V)) {
      if (Zext->getSrcTy() != NarrowType || !Zext->hasOneUser())
      NewIncoming.push_back(Zext->getOperand(0));
    } else if (auto *C = dyn_cast<Constant>(V)) {

  if (NumConsts == 0 || NumZexts < 2)

                                    Phi.getName() + ".shrunk");
  for (unsigned I = 0; I != NumIncomingValues; ++I)
    NewPhi->addIncoming(NewIncoming[I], Phi.getIncomingBlock(I));
  if (isa<GetElementPtrInst>(FirstInst))
  if (isa<LoadInst>(FirstInst))
  if (isa<InsertValueInst>(FirstInst))
  if (isa<ExtractValueInst>(FirstInst))

  Type *CastSrcTy = nullptr;

  if (isa<CastInst>(FirstInst)) {
    if (!shouldChangeType(PN.getType(), CastSrcTy))
  } else if (isa<BinaryOperator>(FirstInst) || isa<CmpInst>(FirstInst)) {
    ConstantOp = dyn_cast<Constant>(FirstInst->getOperand(1));

    if (!I || !I->hasOneUser() || !I->isSameOperationAs(FirstInst))
      if (I->getOperand(0)->getType() != CastSrcTy)
    } else if (I->getOperand(1) != ConstantOp) {

    Value *NewInVal = cast<Instruction>(V)->getOperand(0);
    if (NewInVal != InVal)

  if (CastInst *FirstCI = dyn_cast<CastInst>(FirstInst)) {
  if (BinaryOperator *BinOp = dyn_cast<BinaryOperator>(FirstInst)) {
      BinOp->andIRFlags(V);
  CmpInst *CIOp = cast<CmpInst>(FirstInst);
    if (!ValueEqualPHIs.insert(PN).second)
    if (ValueEqualPHIs.size() == 16)

      if (PHINode *OpPN = dyn_cast<PHINode>(Op)) {
      } else if (Op != NonPhiInVal)

  assert(isa<IntegerType>(PN.getType()) && "Expect only integer type phi");

    if (auto *ConstVA = dyn_cast<ConstantInt>(V))
      if (!ConstVA->isZero())
  return ConstantInt::get(cast<IntegerType>(PN.getType()), 1);
struct PHIUsageRecord {
      : PHIId(Pn), Shift(Sh), Inst(User) {}

    if (PHIId < RHS.PHIId) return true;
    if (PHIId > RHS.PHIId) return false;
    if (Shift < RHS.Shift) return true;
    if (Shift > RHS.Shift) return false;

struct LoweredPHIRecord {
  LoweredPHIRecord(PHINode *Phi, unsigned Sh, Type *Ty)
      : PN(Phi), Shift(Sh), Width(Ty->getPrimitiveSizeInBits()) {}

  LoweredPHIRecord(PHINode *Phi, unsigned Sh) : PN(Phi), Shift(Sh), Width(0) {}

    return LoweredPHIRecord(nullptr, 0);
    return LoweredPHIRecord(nullptr, 1);

                      const LoweredPHIRecord &RHS) {
  PHIsInspected.insert(&FirstPhi);

  for (unsigned PHIId = 0; PHIId != PHIsToSlice.size(); ++PHIId) {
    PHINode *PN = PHIsToSlice[PHIId];

      if (II->getParent() != BB)

    for (auto *Pred : PN->blocks())
      if (Pred->getFirstInsertionPt() == Pred->end())

      if (PHINode *UserPN = dyn_cast<PHINode>(UserI)) {
        if (PHIsInspected.insert(UserPN).second)

      if (isa<TruncInst>(UserI)) {
        PHIUsers.push_back(PHIUsageRecord(PHIId, 0, UserI));

      if (UserI->getOpcode() != Instruction::LShr ||
      if (cast<ConstantInt>(UserI->getOperand(1))->getValue().uge(SizeInBits))

      unsigned Shift = cast<ConstantInt>(UserI->getOperand(1))->getZExtValue();

  if (PHIUsers.empty())

             for (unsigned I = 1; I != PHIsToSlice.size(); ++I) dbgs()
                 << "AND USER PHI #" << I << ": " << *PHIsToSlice[I] << '\n');

  for (unsigned UserI = 0, UserE = PHIUsers.size(); UserI != UserE; ++UserI) {
    unsigned PHIId = PHIUsers[UserI].PHIId;
    PHINode *PN = PHIsToSlice[PHIId];
    unsigned Offset = PHIUsers[UserI].Shift;
    Type *Ty = PHIUsers[UserI].Inst->getType();

    if ((EltPHI = ExtractedVals[LoweredPHIRecord(PN, Offset, Ty)]) == nullptr) {
             "Truncate didn't shrink phi?");
        Value *&PredVal = PredValues[Pred];

        if (PHINode *InPHI = dyn_cast<PHINode>(PN)) {
          if (Value *Res = ExtractedVals[LoweredPHIRecord(InPHI, Offset, Ty)]) {

                Res, ConstantInt::get(InVal->getType(), Offset), "extract");

          if (PHINode *OldInVal = dyn_cast<PHINode>(InVal))
            if (PHIsInspected.count(OldInVal)) {
                  find(PHIsToSlice, OldInVal) - PHIsToSlice.begin();
                  PHIUsageRecord(RefPHIId, Offset, cast<Instruction>(Res)));

                      << *EltPHI << '\n');
    ExtractedVals[LoweredPHIRecord(PN, Offset, Ty)] = EltPHI;
      SuccForValue[C] = Succ;

    if (auto *BI = dyn_cast<BranchInst>(IDom->getTerminator())) {
      if (BI->isUnconditional())
      Cond = BI->getCondition();
    } else if (auto *SI = dyn_cast<SwitchInst>(IDom->getTerminator())) {
      Cond = SI->getCondition();
      ++SuccCount[SI->getDefaultDest()];
      for (auto Case : SI->cases())
        AddSucc(Case.getCaseValue(), Case.getCaseSuccessor());

    std::optional<bool> Invert;
      auto *Input = cast<ConstantInt>(std::get<0>(Pair));

        auto It = SuccForValue.find(Input);
        return It != SuccForValue.end() && SuccCount[It->second] == 1 &&

      if (IsCorrectInput(Input))
        NeedsInvert = false;

      if (Invert && *Invert != NeedsInvert)
      Invert = NeedsInvert;

    if (InsertPt != BB->end()) {
  auto MatchOuterIV = [&](Value *V1, Value *V2) {
      IvNext = cast<Instruction>(V2);

  Value *Iv2Start, *Iv2Step;

    auto *BO = dyn_cast<BinaryOperator>(IvNext);
    if (Iv2Start != Identity)

      auto *GEP = cast<GEPOperator>(IvNext);
      return Builder.CreateGEP(GEP->getSourceElementType(), Start, Iv2, "",
                               cast<GEPOperator>(IvNext)->getNoWrapFlags());

    assert(BO->isCommutative() && "Must be commutative");
    cast<Instruction>(Res)->copyIRFlags(BO);

  if (Inst0 && Inst1 && Inst0->getOpcode() == Inst1->getOpcode() &&
      Inst0->hasOneUser())

    if (IV0 != IV0Stripped &&
        return !CheckedIVs.insert(IV).second ||
               IV0Stripped == IV->stripPointerCasts();
      (isa<BinaryOperator>(PHIUser) || isa<UnaryOperator>(PHIUser) ||
       isa<GetElementPtrInst>(PHIUser)) &&

      auto *CmpInst = dyn_cast<ICmpInst>(U);
      if (U->hasOneUse() && match(U, m_c_Or(m_Specific(&PN), m_Value()))) {
        DropPoisonFlags.push_back(cast<Instruction>(U));
        CmpInst = dyn_cast<ICmpInst>(U->user_back());

    if (AllUsesOfPhiEndsInCmp) {
      bool MadeChange = false;
        if (NonZeroConst != VA) {
          I->dropPoisonGeneratingFlags();
    while (InValNo != NumIncomingVals &&
           isa<PHINode>(PN.getIncomingValue(InValNo)))

    Value *NonPhiInVal =
        InValNo != NumIncomingVals ? PN.getIncomingValue(InValNo) : nullptr;

    for (++InValNo; InValNo != NumIncomingVals; ++InValNo) {
      Value *OpVal = PN.getIncomingValue(InValNo);
      if (OpVal != NonPhiInVal && !isa<PHINode>(OpVal))

    if (InValNo == NumIncomingVals) {
        return replaceInstUsesWith(PN, NonPhiInVal);
  auto Res = PredOrder.try_emplace(PN.getParent());
  const auto &Preds = Res.first->second;
  for (unsigned I = 0, E = PN.getNumIncomingValues(); I != E; ++I) {
      Value *VA = PN.getIncomingValue(I);
      unsigned J = PN.getBasicBlockIndex(BBB);
      Value *VB = PN.getIncomingValue(J);
      PN.setIncomingBlock(I, BBB);
      PN.setIncomingValue(I, VB);
      PN.setIncomingBlock(J, BBA);
      PN.setIncomingValue(J, VA);
    if (&IdenticalPN == &PN)
    if (!PN.isIdenticalToWhenDefined(&IdenticalPN))
    return replaceInstUsesWith(PN, &IdenticalPN);

  if (PN.getType()->isIntegerTy() &&
      !DL.isLegalInteger(PN.getType()->getPrimitiveSizeInBits()))
    if (Instruction *Res = SliceUpIllegalIntegerPHI(PN))

    return replaceInstUsesWith(PN, V);
    return replaceInstUsesWith(PN, Res);
This file provides internal interfaces used to implement the InstCombine.
static ConstantInt * getAnyNonZeroConstInt(PHINode &PN)
Return an existing non-zero constant if this phi node has one, otherwise return constant 1.
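For getAnyNonZeroConstInt, the integer-phi fragments near the end of the listing already show the shape of the logic; here is a minimal self-contained sketch of it (the helper name with the Sketch suffix is illustrative, not part of the file):

    #include "llvm/IR/Constants.h"
    #include "llvm/IR/DerivedTypes.h"
    #include "llvm/IR/Instructions.h"
    #include <cassert>
    using namespace llvm;

    // Illustrative sketch: scan the incoming values for a non-zero integer
    // constant; if none is found, fall back to the constant 1 of the phi's type.
    static ConstantInt *getAnyNonZeroConstIntSketch(PHINode &PN) {
      assert(isa<IntegerType>(PN.getType()) && "Expect only integer type phi");
      for (Value *V : PN.incoming_values())
        if (auto *ConstVA = dyn_cast<ConstantInt>(V))
          if (!ConstVA->isZero())
            return ConstVA;
      return ConstantInt::get(cast<IntegerType>(PN.getType()), 1);
    }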
static Value * foldDependentIVs(PHINode &PN, IRBuilderBase &Builder)
static bool isSafeAndProfitableToSinkLoad(LoadInst *L)
Return true if we know that it is safe to sink the load out of the block that defines it.
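For isSafeAndProfitableToSinkLoad, a condensed sketch of the two checks involved (illustrative helper name; the constant-GEP-of-static-alloca case from the listing is omitted):

    #include "llvm/IR/InstrTypes.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Illustrative sketch.  Safety: nothing after the load in its block may
    // write to memory, except calls that only touch inaccessible memory.
    // Profitability: a load of a static alloca whose address never escapes is
    // better left in place, so the fold is skipped for it.
    static bool isSafeAndProfitableToSinkLoadSketch(LoadInst *L) {
      BasicBlock::iterator BBI = L->getIterator(), E = L->getParent()->end();
      for (++BBI; BBI != E; ++BBI)
        if (BBI->mayWriteToMemory()) {
          if (auto *CB = dyn_cast<CallBase>(&*BBI))
            if (CB->onlyAccessesInaccessibleMemory())
              continue;
          return false;
        }

      if (auto *AI = dyn_cast<AllocaInst>(L->getPointerOperand())) {
        bool IsAddressTaken = false;
        for (User *U : AI->users()) {
          if (isa<LoadInst>(U))
            continue;
          if (auto *SI = dyn_cast<StoreInst>(U))
            if (SI->getPointerOperand() == AI)  // Storing TO the alloca is fine.
              continue;
          IsAddressTaken = true;
          break;
        }
        if (!IsAddressTaken && AI->isStaticAlloca())
          return false;
      }
      return true;
    }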
static Value * simplifyUsingControlFlow(InstCombiner &Self, PHINode &PN, const DominatorTree &DT)
static bool PHIsEqualValue(PHINode *PN, Value *&NonPhiInVal, SmallPtrSetImpl< PHINode * > &ValueEqualPHIs)
Return true if this phi node is always equal to NonPhiInVal.
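For PHIsEqualValue, a simplified sketch of the recursive walk, assuming NonPhiInVal is already known (the real routine also discovers it on the fly; the helper name is illustrative):

    #include "llvm/ADT/SmallPtrSet.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Illustrative sketch: every incoming value must either be a phi that is
    // itself equal to NonPhiInVal, or be NonPhiInVal directly.  The visited set
    // bounds the walk, matching the cutoff of 16 in the listing.
    static bool phisEqualValueSketch(PHINode *PN, Value *NonPhiInVal,
                                     SmallPtrSetImpl<PHINode *> &ValueEqualPHIs) {
      if (!ValueEqualPHIs.insert(PN).second)
        return true;               // Already checked this phi.
      if (ValueEqualPHIs.size() == 16)
        return false;              // Give up on very large phi webs.
      for (Value *Op : PN->incoming_values()) {
        if (auto *OpPN = dyn_cast<PHINode>(Op)) {
          if (!phisEqualValueSketch(OpPN, NonPhiInVal, ValueEqualPHIs))
            return false;
        } else if (Op != NonPhiInVal) {
          return false;
        }
      }
      return true;
    }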
static cl::opt< unsigned > MaxNumPhis("instcombine-max-num-phis", cl::init(512), cl::desc("Maximum number phis to handle in intptr/ptrint folding"))
This file provides the interface for the instcombine pass implementation.
This file defines the SmallPtrSet class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
an instruction to allocate memory on the stack
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
LLVM Basic Block Representation.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Function * getParent() const
Return the enclosing method, or null if none.
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
static BinaryOperator * Create(BinaryOps Op, Value *S1, Value *S2, const Twine &Name=Twine(), InsertPosition InsertBefore=nullptr)
Construct a binary instruction, given the opcode and the two operands.
This is the base class for all instructions that perform data casts.
static CastInst * CreatePointerCast(Value *S, Type *Ty, const Twine &Name="", InsertPosition InsertBefore=nullptr)
Create a BitCast, AddrSpaceCast or a PtrToInt cast instruction.
static CastInst * CreateBitOrPointerCast(Value *S, Type *Ty, const Twine &Name="", InsertPosition InsertBefore=nullptr)
Create a BitCast, a PtrToInt, or an IntToPTr cast instruction.
static CastInst * CreateZExtOrBitCast(Value *S, Type *Ty, const Twine &Name="", InsertPosition InsertBefore=nullptr)
Create a ZExt or BitCast cast instruction.
static CastInst * Create(Instruction::CastOps, Value *S, Type *Ty, const Twine &Name="", InsertPosition InsertBefore=nullptr)
Provides a way to construct any of the CastInst subclasses using an opcode instead of the subclass's ...
This class is the base class for the comparison instructions.
static bool isEquality(Predicate pred)
Determine if this is an equals/not equals predicate.
static CmpInst * Create(OtherOps Op, Predicate Pred, Value *S1, Value *S2, const Twine &Name="", InsertPosition InsertBefore=nullptr)
Construct a compare instruction, given the opcode, the predicate and the two operands.
Predicate getPredicate() const
Return the predicate for this instruction.
OtherOps getOpcode() const
Get the opcode casted to the right type.
static Constant * getNot(Constant *C)
static Constant * getBinOpIdentity(unsigned Opcode, Type *Ty, bool AllowRHSConstant=false, bool NSZ=false)
Return the identity constant for a binary opcode.
This is the shared class of boolean and integer constants.
static ConstantInt * getTrue(LLVMContext &Context)
static ConstantInt * getFalse(LLVMContext &Context)
This is an important base class in LLVM.
static Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
This class represents an Operation in the Expression.
unsigned getPointerSizeInBits(unsigned AS=0) const
Layout pointer size, in bits FIXME: The defaults need to be removed once all of the backends/clients ...
TypeSize getTypeSizeInBits(Type *Ty) const
Size examples:
iterator find(const_arg_type_t< KeyT > Val)
DomTreeNodeBase * getIDom() const
DomTreeNodeBase< NodeT > * getNode(const NodeT *BB) const
getNode - return the (Post)DominatorTree node for the specified basic block.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
bool isReachableFromEntry(const Use &U) const
Provide an overload for a Use.
bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
Represents flags for the getelementptr instruction/expression.
an instruction for type-safe pointer arithmetic to access elements of arrays and structs
static GetElementPtrInst * Create(Type *PointeeType, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Type * getSourceElementType() const
GEPNoWrapFlags getNoWrapFlags() const
Get the nowrap flags for the GEP instruction.
Common base class shared among various IRBuilders.
Value * CreateLShr(Value *LHS, Value *RHS, const Twine &Name="", bool isExact=false)
Value * CreateGEP(Type *Ty, Value *Ptr, ArrayRef< Value * > IdxList, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
Value * CreateNot(Value *V, const Twine &Name="")
Value * CreateTrunc(Value *V, Type *DestTy, const Twine &Name="", bool IsNUW=false, bool IsNSW=false)
Value * CreateBinOp(Instruction::BinaryOps Opc, Value *LHS, Value *RHS, const Twine &Name="", MDNode *FPMathTag=nullptr)
void SetInsertPoint(BasicBlock *TheBB)
This specifies that created instructions should be appended to the end of the specified block.
static InsertValueInst * Create(Value *Agg, Value *Val, ArrayRef< unsigned > Idxs, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Instruction * foldPHIArgInsertValueInstructionIntoPHI(PHINode &PN)
If we have something like phi [insertvalue(a,b,0), insertvalue(c,d,0)], turn this into a phi[a,...
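A sketch of that rewrite, assuming every incoming value has already been verified to be a single-user insertvalue with the same indices, FirstIVI being the first of them (illustrative helper; inserting the new instructions is left to the caller, as InstCombine does via InsertNewInstBefore):

    #include "llvm/IR/Instructions.h"
    #include <array>
    using namespace llvm;

    // Illustrative sketch: phi [insertvalue(a,b,Idx), insertvalue(c,d,Idx)]
    //   -> insertvalue (phi a,c), (phi b,d), Idx
    static InsertValueInst *buildInsertValueOfPhisSketch(PHINode &PN,
                                                         InsertValueInst *FirstIVI) {
      std::array<PHINode *, 2> NewOperands;
      for (int OpIdx : {0, 1}) {
        NewOperands[OpIdx] =
            PHINode::Create(FirstIVI->getOperand(OpIdx)->getType(),
                            PN.getNumIncomingValues(),
                            FirstIVI->getOperand(OpIdx)->getName() + ".pn");
        for (unsigned i = 0, e = PN.getNumIncomingValues(); i != e; ++i)
          NewOperands[OpIdx]->addIncoming(
              cast<InsertValueInst>(PN.getIncomingValue(i))->getOperand(OpIdx),
              PN.getIncomingBlock(i));
      }
      // The caller inserts the two phis before PN, inserts the insertvalue
      // after the phis, and then replaces PN with it.
      return InsertValueInst::Create(NewOperands[0], NewOperands[1],
                                     FirstIVI->getIndices(), PN.getName());
    }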
Instruction * foldPHIArgBinOpIntoPHI(PHINode &PN)
If we have something like phi [add (a,b), add(a,c)] and if a/b/c and the adds all have a single user,...
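A sketch of the simplest shape of that fold, assuming every incoming value is a single-user binary operator with the same opcode and a shared left operand A (the real fold also handles a shared right operand, compares, and IR-flag merging; names are illustrative):

    #include "llvm/IR/InstrTypes.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Illustrative sketch: phi [A op b1, A op b2, ...] -> A op (phi b1, b2, ...).
    static Instruction *buildBinOpOfPhiSketch(PHINode &PN, Value *A,
                                              Instruction::BinaryOps Opc) {
      PHINode *NewRHS =
          PHINode::Create(PN.getType(), PN.getNumIncomingValues(), "rhs.pn");
      for (unsigned i = 0, e = PN.getNumIncomingValues(); i != e; ++i)
        NewRHS->addIncoming(
            cast<BinaryOperator>(PN.getIncomingValue(i))->getOperand(1),
            PN.getIncomingBlock(i));
      // The caller inserts NewRHS among PN's phis and the binop below them.
      return BinaryOperator::Create(Opc, A, NewRHS);
    }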
Constant * getLosslessUnsignedTrunc(Constant *C, Type *TruncTy)
Instruction * eraseInstFromFunction(Instruction &I) override
Combiner aware instruction erasure.
Instruction * visitPHINode(PHINode &PN)
Instruction * foldPHIArgOpIntoPHI(PHINode &PN)
Try to rotate an operation below a PHI node, using PHI nodes for its operands.
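For the cast case this routine covers, the rotation looks roughly like the following sketch (illustrative helper; the uninserted results are handed back to the caller):

    #include "llvm/IR/InstrTypes.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Illustrative sketch: phi [cast x, cast y] -> cast (phi x, y), valid when
    // every incoming value is a single-user cast with the same opcode and
    // source type.
    static Instruction *buildCastOfPhiSketch(PHINode &PN, CastInst *FirstCI) {
      PHINode *NewPN = PHINode::Create(FirstCI->getSrcTy(),
                                       PN.getNumIncomingValues(),
                                       PN.getName() + ".in");
      for (unsigned i = 0, e = PN.getNumIncomingValues(); i != e; ++i)
        NewPN->addIncoming(cast<CastInst>(PN.getIncomingValue(i))->getOperand(0),
                           PN.getIncomingBlock(i));
      // The caller inserts NewPN among PN's phis and the cast after them.
      return CastInst::Create(FirstCI->getOpcode(), NewPN, PN.getType());
    }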
Instruction * foldPHIArgZextsIntoPHI(PHINode &PN)
TODO: This function could handle other cast types, but then it might require special-casing a cast fr...
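A sketch of the rewrite this performs, assuming NewIncoming already holds the zext source operands and the losslessly truncated constants in incoming order, as the listing collects them (helper name illustrative):

    #include "llvm/ADT/ArrayRef.h"
    #include "llvm/IR/InstrTypes.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Illustrative sketch: phi (zext a, zext b, C) -> zext (phi a, b, trunc C).
    static Instruction *buildNarrowPhiThenZextSketch(PHINode &Phi,
                                                     Type *NarrowType,
                                                     ArrayRef<Value *> NewIncoming) {
      PHINode *NewPhi = PHINode::Create(NarrowType, Phi.getNumIncomingValues(),
                                        Phi.getName() + ".shrunk");
      for (unsigned I = 0, E = Phi.getNumIncomingValues(); I != E; ++I)
        NewPhi->addIncoming(NewIncoming[I], Phi.getIncomingBlock(I));
      // The caller inserts NewPhi among Phi's phis; the zext then replaces Phi.
      return CastInst::CreateZExtOrBitCast(NewPhi, Phi.getType());
    }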
Instruction * foldPHIArgLoadIntoPHI(PHINode &PN)
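A sketch of the core rewrite of foldPHIArgLoadIntoPHI, assuming every incoming value was already checked to be a simple, sinkable load of the same type; volatility matching, address-space checks, and metadata merging from the listing are omitted (names illustrative):

    #include "llvm/IR/Instructions.h"
    #include "llvm/Support/Alignment.h"
    #include <algorithm>
    using namespace llvm;

    // Illustrative sketch: phi [load p1, load p2] -> load (phi p1, p2), using
    // the minimum alignment of the original loads.
    static LoadInst *buildLoadOfPhiSketch(PHINode &PN, LoadInst *FirstLI) {
      PHINode *NewPN = PHINode::Create(FirstLI->getPointerOperand()->getType(),
                                       PN.getNumIncomingValues(),
                                       PN.getName() + ".in");
      Align LoadAlignment = FirstLI->getAlign();
      for (unsigned i = 0, e = PN.getNumIncomingValues(); i != e; ++i) {
        auto *LI = cast<LoadInst>(PN.getIncomingValue(i));
        NewPN->addIncoming(LI->getPointerOperand(), PN.getIncomingBlock(i));
        LoadAlignment = std::min(LoadAlignment, LI->getAlign());
      }
      // The caller inserts NewPN among PN's phis and the load after them.  The
      // real fold requires all loads to agree on volatility.
      return new LoadInst(FirstLI->getType(), NewPN, "", FirstLI->isVolatile(),
                          LoadAlignment);
    }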
bool foldIntegerTypedPHI(PHINode &PN)
If an integer typed PHI has only one use which is an IntToPtr operation, replace the PHI with an exis...
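A reduced sketch of that fold for the easiest case, where every incoming value is a ptrtoint whose operand already has the pointer type produced by the phi's single inttoptr user (the real routine also matches existing pointer phis, loads, and arguments, and respects the MaxNumPhis cutoff; names illustrative):

    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Illustrative sketch: int-typed phi whose only user is an inttoptr, fed by
    // ptrtoint of pointers -> a pointer-typed phi that replaces the inttoptr.
    static PHINode *buildPointerPhiSketch(PHINode &PN) {
      if (!PN.hasOneUser())
        return nullptr;
      auto *IntToPtr = dyn_cast<IntToPtrInst>(PN.user_back());
      if (!IntToPtr)
        return nullptr;
      // Every incoming value must be ptrtoint of a pointer of the right type.
      for (Value *V : PN.incoming_values()) {
        auto *PI = dyn_cast<PtrToIntInst>(V);
        if (!PI || PI->getOperand(0)->getType() != IntToPtr->getType())
          return nullptr;   // The real fold also handles these other cases.
      }
      PHINode *NewPtrPHI = PHINode::Create(
          IntToPtr->getType(), PN.getNumIncomingValues(), PN.getName() + ".ptr");
      for (unsigned i = 0, e = PN.getNumIncomingValues(); i != e; ++i)
        NewPtrPHI->addIncoming(
            cast<PtrToIntInst>(PN.getIncomingValue(i))->getOperand(0),
            PN.getIncomingBlock(i));
      // The caller inserts NewPtrPHI among PN's phis and RAUWs IntToPtr with it.
      return NewPtrPHI;
    }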
bool foldDeadPhiWeb(PHINode &PN)
If the phi is within a phi web, which is formed by the def-use chain of phis and all the phis in the ...
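A sketch of the web walk shown near the top of the listing (illustrative helper; erasing the dead phis is left to the caller):

    #include "llvm/ADT/SmallPtrSet.h"
    #include "llvm/ADT/SmallVector.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Illustrative sketch: returns true if PN belongs to a web of phis with no
    // non-phi users, so the entire web is dead.
    static bool isDeadPhiWebSketch(PHINode &PN) {
      SmallVector<PHINode *, 16> Stack;
      SmallPtrSet<PHINode *, 16> Visited;
      Stack.push_back(&PN);
      while (!Stack.empty()) {
        PHINode *Phi = Stack.pop_back_val();
        if (!Visited.insert(Phi).second)
          continue;
        if (Visited.size() == 16)   // Same cutoff as the listing above.
          return false;
        for (User *Use : Phi->users()) {
          auto *PhiUse = dyn_cast<PHINode>(Use);
          if (!PhiUse)
            return false;           // A non-phi user keeps the web alive.
          Stack.push_back(PhiUse);
        }
      }
      return true;
    }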
Instruction * foldPHIArgIntToPtrToPHI(PHINode &PN)
Instruction * SliceUpIllegalIntegerPHI(PHINode &PN)
This is an integer PHI and we know that it has an illegal type: see if it is only used by trunc or tr...
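A sketch of the per-piece extraction step used while slicing: each incoming value is shifted right to the use's bit offset and truncated to the use's type in the predecessor block, and those pieces feed a new narrow phi (helper name illustrative):

    #include "llvm/IR/BasicBlock.h"
    #include "llvm/IR/Constants.h"
    #include "llvm/IR/IRBuilder.h"
    using namespace llvm;

    // Illustrative sketch: extract the Ty-sized piece of InVal that starts at
    // bit Offset, emitting the lshr/trunc just before Pred's terminator.
    static Value *extractPieceSketch(IRBuilderBase &Builder, Value *InVal,
                                     unsigned Offset, Type *Ty, BasicBlock *Pred) {
      Builder.SetInsertPoint(Pred->getTerminator());
      Value *Res = InVal;
      if (Offset)
        Res = Builder.CreateLShr(Res, ConstantInt::get(InVal->getType(), Offset),
                                 "extract");
      return Builder.CreateTrunc(Res, Ty, "extract.t");
    }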
Instruction * foldPHIArgGEPIntoPHI(PHINode &PN)
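For foldPHIArgGEPIntoPHI, a sketch of the simplest case, assuming all incoming values are single-user GEPs with the same source element type and indices that differ only in the base pointer (the real fold builds one phi per differing operand and intersects nowrap flags, as the listing shows; names illustrative):

    #include "llvm/ADT/SmallVector.h"
    #include "llvm/IR/GEPNoWrapFlags.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Illustrative sketch: phi [gep T, p1, Idx..., gep T, p2, Idx...]
    //   -> gep T, (phi p1, p2), Idx..., with nowrap flags intersected.
    static GetElementPtrInst *buildGEPOfPhiSketch(PHINode &PN,
                                                  GetElementPtrInst *FirstGEP) {
      PHINode *BasePN = PHINode::Create(FirstGEP->getPointerOperandType(),
                                        PN.getNumIncomingValues(), "base.pn");
      GEPNoWrapFlags NW = FirstGEP->getNoWrapFlags();
      for (unsigned i = 0, e = PN.getNumIncomingValues(); i != e; ++i) {
        auto *GEP = cast<GetElementPtrInst>(PN.getIncomingValue(i));
        BasePN->addIncoming(GEP->getPointerOperand(), PN.getIncomingBlock(i));
        NW &= GEP->getNoWrapFlags();
      }
      SmallVector<Value *, 8> Indices;
      for (Value *Idx : FirstGEP->indices())
        Indices.push_back(Idx);
      // The caller inserts BasePN among PN's phis and the GEP after them.
      return GetElementPtrInst::Create(FirstGEP->getSourceElementType(), BasePN,
                                       Indices, NW);
    }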
void PHIArgMergedDebugLoc(Instruction *Inst, PHINode &PN)
Helper function for FoldPHIArgXIntoPHI() to set debug location for the folded operation.
Instruction * foldPHIArgExtractValueInstructionIntoPHI(PHINode &PN)
If we have something like phi [extractvalue(a,0), extractvalue(b,0)], turn this into a phi[a,...
The core instruction combiner logic.
Instruction * InsertNewInstBefore(Instruction *New, BasicBlock::iterator Old)
Inserts an instruction New before instruction Old.
Instruction * replaceInstUsesWith(Instruction &I, Value *V)
A combiner-aware RAUW-like routine.
Instruction * replaceOperand(Instruction &I, unsigned OpNum, Value *V)
Replace operand of instruction and add old operand to the worklist.
const SimplifyQuery & getSimplifyQuery() const
void copyIRFlags(const Value *V, bool IncludeWrapFlags=true)
Convenience method to copy supported exact, fast-math, and (optionally) wrapping flags from V to this...
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
void andIRFlags(const Value *V)
Logical 'and' of any supported wrapping, exact, and fast-math flags of V and this instruction.
bool isAtomic() const LLVM_READONLY
Return true if this instruction has an AtomicOrdering of unordered or higher.
Instruction * user_back()
Specialize the methods defined in Value, as we know that an instruction can only be used by other ins...
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void applyMergedLocation(DILocation *LocA, DILocation *LocB)
Merge 2 debug locations and apply it to the Instruction.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
void copyMetadata(const Instruction &SrcInst, ArrayRef< unsigned > WL=ArrayRef< unsigned >())
Copy metadata from SrcInst to this instruction.
This is an important class for using LLVM in a threaded context.
An instruction for reading from memory.
unsigned getPointerAddressSpace() const
Returns the address space of the pointer operand.
bool isVolatile() const
Return true if this is a load from a volatile memory location.
Align getAlign() const
Return the alignment of the access that is being performed.
void addIncoming(Value *V, BasicBlock *BB)
Add an incoming value to the end of the PHI list.
iterator_range< const_block_iterator > blocks() const
op_range incoming_values()
BasicBlock * getIncomingBlock(unsigned i) const
Return incoming basic block number i.
Value * getIncomingValue(unsigned i) const
Return incoming value number x.
unsigned getNumIncomingValues() const
Return the number of incoming edges.
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
A templated base class for SmallPtrSet which provides the typesafe interface that is common across al...
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
reference emplace_back(ArgTypes &&... Args)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
bool isPointerTy() const
True if this is an instance of PointerType.
unsigned getScalarSizeInBits() const LLVM_READONLY
If this is a vector type, return the getPrimitiveSizeInBits value for the element type.
bool isIntegerTy() const
True if this is an instance of IntegerType.
TypeSize getPrimitiveSizeInBits() const LLVM_READONLY
Return the basic size of this type if it is a primitive type.
A Use represents the edge between a Value definition and its users.
void setOperand(unsigned i, Value *Val)
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
bool hasOneUser() const
Return true if there is exactly one user of this value.
bool hasOneUse() const
Return true if there is exactly one use of this value.
iterator_range< user_iterator > users()
bool hasNUsesOrMore(unsigned N) const
Return true if this value has N uses or more.
const Value * stripPointerCasts() const
Strip off pointer casts, all-zero GEPs and address space casts.
bool isSwiftError() const
Return true if this value is a swifterror value.
LLVMContext & getContext() const
All values hold a context through their type.
StringRef getName() const
Return a constant reference to the value's name.
const ParentTy * getParent() const
self_iterator getIterator()
class_match< BinaryOperator > m_BinOp()
Match an arbitrary binary operation and ignore it.
bool match(Val *V, const Pattern &P)
specificval_ty m_Specific(const Value *V)
Match if we have a specific specified value.
auto m_GEP(const OperandTypes &...Ops)
Matches GetElementPtrInst.
AnyBinaryOp_match< LHS, RHS, true > m_c_BinOp(const LHS &L, const RHS &R)
Matches a BinaryOperator with LHS and RHS in either order.
is_zero m_Zero()
Match any null constant or a vector with all elements equal to 0.
initializer< Ty > init(const Ty &Val)
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
detail::zippy< detail::zip_shortest, T, U, Args... > zip(T &&t, U &&u, Args &&...args)
zip iterator for two or more iteratable types.
auto find(R &&Range, const T &Val)
Provide wrappers to std::find which take ranges instead of having to pass begin/end explicitly.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
void append_range(Container &C, Range &&R)
Wrapper function to append range R to container C.
Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
bool matchSimpleRecurrence(const PHINode *P, BinaryOperator *&BO, Value *&Start, Value *&Step)
Attempt to match a simple first order recurrence cycle of the form: iv = phi Ty [Start,...
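A brief usage sketch for this helper as the dependent-IV fold above uses it (wrapper name illustrative):

    #include "llvm/Analysis/ValueTracking.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    // Illustrative usage: pull the pieces of iv = phi [Start, entry],
    // [iv.next, latch] with iv.next = iv <op> Step out of a candidate phi.
    static bool hasCommutativeRecurrenceSketch(const PHINode *P) {
      BinaryOperator *BO = nullptr;
      Value *Start = nullptr, *Step = nullptr;
      if (!matchSimpleRecurrence(P, BO, Start, Step))
        return false;
      // The dependent-IV fold above insists on a commutative increment.
      return BO->isCommutative() && Start && Step;
    }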
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
void combineMetadataForCSE(Instruction *K, const Instruction *J, bool DoesKMove)
Combine the metadata of two instructions so that K can replace J.
void array_pod_sort(IteratorTy Start, IteratorTy End)
array_pod_sort - This sorts an array with the specified start and end extent.
This struct is a compact representation of a valid (non-zero power of two) alignment.
static bool isEqual(const LoweredPHIRecord &LHS, const LoweredPHIRecord &RHS)
static unsigned getHashValue(const LoweredPHIRecord &Val)
static LoweredPHIRecord getEmptyKey()
static LoweredPHIRecord getTombstoneKey()
An information struct used to provide DenseMap with the various necessary components for a given valu...
SimplifyQuery getWithInstruction(const Instruction *I) const