175 "disable-separate-const-offset-from-gep",
cl::init(
false),
176 cl::desc(
"Do not separate the constant offset from a GEP instruction"),
184 cl::desc(
"Verify this pass produces no dead code"),
/// A helper class for separating a constant offset from a GEP index
/// expression.
class ConstantOffsetExtractor {
public:
  /// Extracts a constant offset from the given GEP index Idx and returns a
  /// new index with that offset removed. UserChainTail receives the tail of
  /// the rebuilt user chain, and PreservesNUW reports whether the rewrite
  /// keeps the GEP's no-unsigned-wrap guarantee.
  static Value *Extract(Value *Idx, GetElementPtrInst *GEP,
                        User *&UserChainTail, bool &PreservesNUW);
  /// Looks for a constant offset in Idx without extracting it.
  static APInt Find(Value *Idx, GetElementPtrInst *GEP);

private:
  ConstantOffsetExtractor(BasicBlock::iterator InsertionPt)
      : IP(InsertionPt), DL(InsertionPt->getDataLayout()) {}

  APInt find(Value *V, bool SignExtended, bool ZeroExtended, bool NonNegative);
  APInt findInEitherOperand(BinaryOperator *BO, bool SignExtended,
                            bool ZeroExtended);
  Value *applyCasts(Value *V);
  Value *rebuildWithoutConstOffset();
  Value *distributeCastsAndCloneChain(unsigned ChainIndex);
  Value *removeConstOffset(unsigned ChainIndex);
  bool CanTraceInto(bool SignExtended, bool ZeroExtended, BinaryOperator *BO,
                    bool NonNegative);
  APInt extractDisjointBitsFromXor(BinaryOperator *XorInst);

  /// The path from the constant offset down through the index expression.
  SmallVector<User *, 8> UserChain;
  /// Casts that must be re-applied when rebuilding the index.
  SmallVector<CastInst *, 16> CastInsts;
  /// Insertion point for the rebuilt index.
  BasicBlock::iterator IP;

  const DataLayout &DL;
};
class SeparateConstOffsetFromGEPLegacyPass : public FunctionPass {
public:
  static char ID;

  SeparateConstOffsetFromGEPLegacyPass(bool LowerGEP = false)
      : FunctionPass(ID), LowerGEP(LowerGEP) {
    initializeSeparateConstOffsetFromGEPLegacyPassPass(
        *PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<LoopInfoWrapperPass>();
    AU.addRequired<TargetLibraryInfoWrapperPass>();
    AU.addRequired<TargetTransformInfoWrapperPass>();
    AU.setPreservesCFG();
  }

  bool runOnFunction(Function &F) override;

private:
  bool LowerGEP;
};
/// A pass that tries to split every GEP into a variadic base and a constant
/// offset. It operates on a whole function so that dominating add/sub
/// expressions can be reused across basic blocks.
class SeparateConstOffsetFromGEP {
public:
  SeparateConstOffsetFromGEP(
      DominatorTree *DT, LoopInfo *LI, TargetLibraryInfo *TLI,
      function_ref<TargetTransformInfo &(Function &)> GetTTI, bool LowerGEP)
      : DT(DT), LI(LI), TLI(TLI), GetTTI(GetTTI), LowerGEP(LowerGEP) {}

  bool run(Function &F);

private:
  /// Tracks the operands of an add or sub.
  using ExprKey = std::pair<Value *, Value *>;

  /// Create a pair for use as a map key for a commutable operation.
  static ExprKey createNormalizedCommutablePair(Value *A, Value *B) {
    if (A < B)
      return {A, B};
    return {B, A};
  }

  /// Tries to split the given GEP into a variadic base and a constant offset,
  /// and returns true if the splitting succeeds.
  bool splitGEP(GetElementPtrInst *GEP);

  /// Tries to reorder the given GEP with the GEP that produces its base so
  /// that the constant offset ends up in the outermost GEP.
  bool reorderGEP(GetElementPtrInst *GEP, TargetTransformInfo &TTI);

  /// Lower a GEP with multiple indices into multiple GEPs with a single index.
  void lowerToSingleIndexGEPs(GetElementPtrInst *Variadic,
                              const APInt &AccumulativeByteOffset);

  /// Finds the constant offset within each index and accumulates them all
  /// into a byte offset.
  APInt accumulateByteOffset(GetElementPtrInst *GEP, bool &NeedsExtraction);

  /// Canonicalize array indices to the pointer's index-type width.
  bool canonicalizeArrayIndicesToIndexSize(GetElementPtrInst *GEP);

  /// Reunites sign extensions with dominating reassociated expressions.
  bool reuniteExts(Function &F);
  bool reuniteExts(Instruction *I);
  Instruction *findClosestMatchingDominator(
      ExprKey Key, Instruction *Dominatee,
      DenseMap<ExprKey, SmallVector<Instruction *, 2>> &DominatingExprs);

  /// Verify F is free of dead code.
  void verifyNoDeadCode(Function &F);

  bool hasMoreThanOneUseInLoop(Value *v, Loop *L);

  /// Swap the index operands of two single-index GEPs.
  void swapGEPOperand(GetElementPtrInst *First, GetElementPtrInst *Second);

  /// Check if it is safe to swap the operands of two GEP instructions.
  bool isLegalToSwapOperand(GetElementPtrInst *First,
                            GetElementPtrInst *Second, Loop *CurLoop);

  const DataLayout *DL = nullptr;
  DominatorTree *DT = nullptr;
  LoopInfo *LI = nullptr;
  TargetLibraryInfo *TLI;
  function_ref<TargetTransformInfo &(Function &)> GetTTI;

  /// Whether to lower a GEP with multiple indices into single-index GEPs or
  /// plain pointer arithmetic after the split.
  bool LowerGEP;

  DenseMap<ExprKey, SmallVector<Instruction *, 2>> DominatingAdds;
  DenseMap<ExprKey, SmallVector<Instruction *, 2>> DominatingSubs;
};
char SeparateConstOffsetFromGEPLegacyPass::ID = 0;

INITIALIZE_PASS_BEGIN(
    SeparateConstOffsetFromGEPLegacyPass, "separate-const-offset-from-gep",
    "Split GEPs to a variadic base and a constant offset for better CSE",
    false, false)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_END(
    SeparateConstOffsetFromGEPLegacyPass, "separate-const-offset-from-gep",
    "Split GEPs to a variadic base and a constant offset for better CSE",
    false, false)

FunctionPass *llvm::createSeparateConstOffsetFromGEPPass(bool LowerGEP) {
  return new SeparateConstOffsetFromGEPLegacyPass(LowerGEP);
}
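// ---------------------------------------------------------------------------
// Usage sketch (illustration only, not part of this file): one way a client
// could schedule the pass through the legacy pass manager, using the
// createSeparateConstOffsetFromGEPPass factory defined above. The wrapper
// function name and the choice of LowerGEP=true are assumptions made for the
// example.
#include "llvm/IR/LegacyPassManager.h"
#include "llvm/IR/Module.h"
#include "llvm/Transforms/Scalar.h"

static void scheduleSeparateConstOffsetFromGEP(llvm::Module &M) {
  llvm::legacy::PassManager PM;
  // LowerGEP=true additionally lowers the split GEPs to single-index GEPs.
  PM.add(llvm::createSeparateConstOffsetFromGEPPass(/*LowerGEP=*/true));
  PM.run(M);
}
// ---------------------------------------------------------------------------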
bool ConstantOffsetExtractor::CanTraceInto(bool SignExtended, bool ZeroExtended,
                                           BinaryOperator *BO,
                                           bool NonNegative) {
  // Only add, sub and add-equivalent or expressions can be traced into.
  if (BO->getOpcode() != Instruction::Add &&
      BO->getOpcode() != Instruction::Sub &&
      BO->getOpcode() != Instruction::Or)
    return false;
  // ...
  // Do not trace into "or" unless it is equivalent to "add", i.e. its two
  // operands are proven to have no common bits set.
  if (BO->getOpcode() == Instruction::Or &&
      // ...
  // Constants on the RHS of a sub are not extracted in the zero-extended,
  // non-sign-extended case.
  if (ZeroExtended && !SignExtended && BO->getOpcode() == Instruction::Sub)
    return false;
  // ...
  if (!ConstLHS->isNegative())
    return true;
  // ...
  if (!ConstRHS->isNegative())
    return true;
  // ...
  // sext(add/sub nsw) and zext(add/sub nuw) distribute over their operands.
  if (BO->getOpcode() == Instruction::Add ||
      BO->getOpcode() == Instruction::Sub) {
    if (SignExtended && !BO->hasNoSignedWrap())
      return false;
    if (ZeroExtended && !BO->hasNoUnsignedWrap())
      return false;
  }

  return true;
}
APInt ConstantOffsetExtractor::findInEitherOperand(BinaryOperator *BO,
                                                   bool SignExtended,
                                                   bool ZeroExtended) {
  // Save the current chain height so it can be restored if an operand turns
  // out not to contain a constant offset.
  size_t ChainLength = UserChain.size();

  // BO being non-negative says nothing about its operands, so the NonNegative
  // flag is cleared while recursing.
  APInt ConstantOffset = find(BO->getOperand(0), SignExtended, ZeroExtended,
                              /* NonNegative */ false);

  // A constant offset in the left operand wins; combining offsets from both
  // operands is left to earlier passes such as instcombine.
  if (ConstantOffset != 0)
    return ConstantOffset;

  UserChain.resize(ChainLength);

  ConstantOffset = find(BO->getOperand(1), SignExtended, ZeroExtended,
                        /* NonNegative */ false);
  // If BO is a sub, a constant found in the right operand contributes with
  // the opposite sign.
  if (BO->getOpcode() == Instruction::Sub)
    ConstantOffset = -ConstantOffset;

  // The right operand did not contain a constant either; restore the chain.
  if (ConstantOffset == 0)
    UserChain.resize(ChainLength);

  return ConstantOffset;
}
APInt ConstantOffsetExtractor::find(Value *V, bool SignExtended,
                                    bool ZeroExtended, bool NonNegative) {
  unsigned BitWidth = cast<IntegerType>(V->getType())->getBitWidth();

  // Only Users (instructions and constant expressions) can be traced further.
  User *U = dyn_cast<User>(V);
  if (U == nullptr)
    return APInt(BitWidth, 0);

  APInt ConstantOffset(BitWidth, 0);
  if (ConstantInt *CI = dyn_cast<ConstantInt>(V)) {
    // Hooray, we found it!
    ConstantOffset = CI->getValue();
  } else if (BinaryOperator *BO = dyn_cast<BinaryOperator>(V)) {
    // Trace into subexpressions for more hoisting opportunities.
    if (CanTraceInto(SignExtended, ZeroExtended, BO, NonNegative))
      ConstantOffset = findInEitherOperand(BO, SignExtended, ZeroExtended);
    else if (BO->getOpcode() == Instruction::Xor)
      ConstantOffset = extractDisjointBitsFromXor(BO);
  } else if (isa<SExtInst>(V)) {
    ConstantOffset = find(U->getOperand(0), /* SignExtended */ true,
                          ZeroExtended, NonNegative)
                         .sext(BitWidth);
  } else if (isa<ZExtInst>(V)) {
    // sext(zext(a)) == zext(a), and zext(a) >= 0 tells us nothing about a,
    // so both SignExtended and NonNegative are cleared here.
    ConstantOffset = find(U->getOperand(0), /* SignExtended */ false,
                          /* ZeroExtended */ true, /* NonNegative */ false)
                         .zext(BitWidth);
  }

  // A non-zero constant offset extends the user chain that is rebuilt later;
  // a zero offset would not help this optimization.
  if (ConstantOffset != 0)
    UserChain.push_back(U);
  return ConstantOffset;
}
Value *ConstantOffsetExtractor::applyCasts(Value *V) {
  // ... (re-applies the casts recorded in CastInsts to V)
Value *ConstantOffsetExtractor::rebuildWithoutConstOffset() {
  distributeCastsAndCloneChain(UserChain.size() - 1);
  // Remove all nullptrs (formerly sext/zext/trunc casts) from UserChain.
  unsigned NewSize = 0;
  for (User *I : UserChain) {
    if (I != nullptr) {
      UserChain[NewSize] = I;
      NewSize++;
    }
  }
  UserChain.resize(NewSize);
  return removeConstOffset(UserChain.size() - 1);
}
Value *
ConstantOffsetExtractor::distributeCastsAndCloneChain(unsigned ChainIndex) {
  User *U = UserChain[ChainIndex];
  if (ChainIndex == 0) {
    assert(isa<ConstantInt>(U));
    // If U is a ConstantInt, applyCasts will return a ConstantInt as well.
    return UserChain[ChainIndex] = cast<ConstantInt>(applyCasts(U));
  }

  if (CastInst *Cast = dyn_cast<CastInst>(U)) {
    assert(
        (isa<SExtInst>(Cast) || isa<ZExtInst>(Cast) || isa<TruncInst>(Cast)) &&
        "Only following instructions can be traced: sext, zext & trunc");
    CastInsts.push_back(Cast);
    // Mark the cast for removal; rebuildWithoutConstOffset drops nullptrs.
    UserChain[ChainIndex] = nullptr;
    return distributeCastsAndCloneChain(ChainIndex - 1);
  }

  // find only traces into BinaryOperator and CastInst.
  BinaryOperator *BO = cast<BinaryOperator>(U);
  // OpNo tells which operand of BO is UserChain[ChainIndex - 1].
  unsigned OpNo = (BO->getOperand(0) == UserChain[ChainIndex - 1] ? 0 : 1);
  Value *TheOther = BO->getOperand(1 - OpNo);
  Value *NextInChain = distributeCastsAndCloneChain(ChainIndex - 1);

  BinaryOperator *NewBO = nullptr;
  if (OpNo == 0)
    NewBO = BinaryOperator::Create(BO->getOpcode(), NextInChain, TheOther,
                                   BO->getName(), IP);
  else
    NewBO = BinaryOperator::Create(BO->getOpcode(), TheOther, NextInChain,
                                   BO->getName(), IP);
  return UserChain[ChainIndex] = NewBO;
}
Value *ConstantOffsetExtractor::removeConstOffset(unsigned ChainIndex) {
  if (ChainIndex == 0) {
    assert(isa<ConstantInt>(UserChain[ChainIndex]));
    return ConstantInt::getNullValue(UserChain[ChainIndex]->getType());
  }

  BinaryOperator *BO = cast<BinaryOperator>(UserChain[ChainIndex]);
  assert((BO->use_empty() || BO->hasOneUse()) &&
         "distributeCastsAndCloneChain clones each BinaryOperator in "
         "UserChain, so no one should be used more than "
         "once");

  unsigned OpNo = (BO->getOperand(0) == UserChain[ChainIndex - 1] ? 0 : 1);
  assert(BO->getOperand(OpNo) == UserChain[ChainIndex - 1]);
  Value *NextInChain = removeConstOffset(ChainIndex - 1);
  Value *TheOther = BO->getOperand(1 - OpNo);

  // If NextInChain is 0 and not the LHS of a sub, the whole BO collapses to
  // the other operand.
  if (ConstantInt *CI = dyn_cast<ConstantInt>(NextInChain)) {
    if (CI->isZero() && !(BO->getOpcode() == Instruction::Sub && OpNo == 0))
      return TheOther;
  }

  BinaryOperator::BinaryOps NewOp = BO->getOpcode();
  if (BO->getOpcode() == Instruction::Or) {
    // Rebuild the "or" as an "add": once the constant has been removed, the
    // remaining operands may no longer be disjoint.
    NewOp = Instruction::Add;
  }

  BinaryOperator *NewBO;
  if (OpNo == 0)
    NewBO = BinaryOperator::Create(NewOp, NextInChain, TheOther, "", IP);
  else
    NewBO = BinaryOperator::Create(NewOp, TheOther, NextInChain, "", IP);
  NewBO->takeName(BO);
  return NewBO;
}
APInt ConstantOffsetExtractor::extractDisjointBitsFromXor(
    BinaryOperator *XorInst) {
  assert(XorInst && XorInst->getOpcode() == Instruction::Xor &&
         "Expected XOR instruction");

  unsigned BitWidth = XorInst->getType()->getScalarSizeInBits();
  Value *BaseOperand;
  ConstantInt *XorConstant;

  // Only an xor of a value with a constant can be rewritten here.
  if (!match(XorInst, m_Xor(m_Value(BaseOperand), m_ConstantInt(XorConstant))))
    return APInt::getZero(BitWidth);

  const SimplifyQuery SQ(DL);
  const KnownBits BaseKnownBits = computeKnownBits(BaseOperand, SQ);
  const APInt &ConstantValue = XorConstant->getValue();

  // Bits of the constant that are known to be zero in the base operand act
  // like an addition and can be extracted as a constant offset.
  const APInt DisjointBits = ConstantValue & BaseKnownBits.Zero;

  if (DisjointBits.isZero())
    return APInt::getZero(BitWidth);

  // The remaining, possibly overlapping bits must stay behind in the xor.
  const APInt NonDisjointBits = ConstantValue & ~DisjointBits;
  // ...
  UserChain.push_back(ConstantInt::get(XorInst->getType(), NonDisjointBits));
  // ...
  return DisjointBits;
}
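// ---------------------------------------------------------------------------
// Standalone illustration (not part of the pass) of the disjoint-bits rule
// used above: when the base operand's known-zero bits cover part of the xor
// constant, those bits behave like an addition and can be split off as a
// constant offset. The bit width and values below are assumptions made for
// the example.
#include "llvm/ADT/APInt.h"
#include "llvm/Support/KnownBits.h"

static bool xorConstantActsLikeAdd() {
  llvm::KnownBits Base(/*BitWidth=*/8);
  Base.Zero = llvm::APInt(8, 0x0F);      // low four bits of the base are known zero
  llvm::APInt C(8, 0x03);                // xor constant
  llvm::APInt Disjoint = C & Base.Zero;  // bits of C that cannot collide with the base
  // Here Disjoint == C, so (Base ^ C) == (Base + C) and C is fully extractable.
  return Disjoint == C;
}
// ---------------------------------------------------------------------------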
// Returns true if reassociating through this user-chain entry keeps an add's
// no-unsigned-wrap (nuw) guarantee intact.
static bool allowsPreservingNUW(const User *U) {
  // ...
  if (Opcode == BinaryOperator::Or) {
    // A disjoint "or" represents an addition that cannot wrap unsigned.
    // ...
  }
  // ...
  // A trunc only keeps nuw intact if it is itself marked nuw.
  if (const TruncInst *TI = dyn_cast<TruncInst>(U))
    return TI->hasNoUnsignedWrap();
  // ...
}
Value *ConstantOffsetExtractor::Extract(Value *Idx, GetElementPtrInst *GEP,
                                        User *&UserChainTail,
                                        bool &PreservesNUW) {
  ConstantOffsetExtractor Extractor(GEP->getIterator());
  // Find a non-zero constant offset first.
  APInt ConstantOffset =
      Extractor.find(Idx, /* SignExtended */ false, /* ZeroExtended */ false,
                     GEP->isInBounds());
  if (ConstantOffset == 0) {
    UserChainTail = nullptr;
    PreservesNUW = true;
    return nullptr;
  }

  // Nuw survives only if every entry of the chain is safe to reassociate
  // through.
  PreservesNUW = all_of(Extractor.UserChain, allowsPreservingNUW);

  // Separate the constant offset from the GEP index.
  Value *IdxWithoutConstOffset = Extractor.rebuildWithoutConstOffset();
  UserChainTail = Extractor.UserChain.back();
  return IdxWithoutConstOffset;
}
APInt ConstantOffsetExtractor::Find(Value *Idx, GetElementPtrInst *GEP) {
  // Look for a non-zero constant offset without extracting it.
  return ConstantOffsetExtractor(GEP->getIterator())
      .find(Idx, /* SignExtended */ false, /* ZeroExtended */ false,
            GEP->isInBounds());
}
bool SeparateConstOffsetFromGEP::canonicalizeArrayIndicesToIndexSize(
    GetElementPtrInst *GEP) {
  bool Changed = false;
  Type *PtrIdxTy = DL->getIndexType(GEP->getType());
  gep_type_iterator GTI = gep_type_begin(*GEP);
  for (User::op_iterator I = GEP->op_begin() + 1, E = GEP->op_end();
       I != E; ++I, ++GTI) {
    // Skip struct member indices, which must be i32.
    if (GTI.isSequential()) {
      if ((*I)->getType() != PtrIdxTy) {
        *I = CastInst::CreateIntegerCast(*I, PtrIdxTy, /*isSigned=*/true,
                                         "idxprom", GEP->getIterator());
        Changed = true;
      }
    }
  }
  return Changed;
}
APInt SeparateConstOffsetFromGEP::accumulateByteOffset(GetElementPtrInst *GEP,
                                                       bool &NeedsExtraction) {
  NeedsExtraction = false;
  unsigned IdxWidth = DL->getIndexTypeSizeInBits(GEP->getType());
  APInt AccumulativeByteOffset(IdxWidth, 0);
  gep_type_iterator GTI = gep_type_begin(*GEP);
  for (unsigned I = 1, E = GEP->getNumOperands(); I != E; ++I, ++GTI) {
    if (GTI.isSequential()) {
      // Constant offsets of scalable types are not really constant.
      if (GTI.getIndexedType()->isScalableTy())
        continue;

      // Try to find a constant offset in this GEP index.
      APInt ConstantOffset =
          ConstantOffsetExtractor::Find(GEP->getOperand(I), GEP)
              .sextOrTrunc(IdxWidth);
      if (ConstantOffset != 0) {
        NeedsExtraction = true;
        // A GEP may have multiple indices. Accumulate each extracted constant
        // into a single byte offset that is applied after the variadic part.
        AccumulativeByteOffset +=
            ConstantOffset *
            APInt(IdxWidth, GTI.getSequentialElementStride(*DL));
      }
    } else if (LowerGEP) {
      StructType *StTy = GTI.getStructType();
      uint64_t Field = cast<ConstantInt>(GEP->getOperand(I))->getZExtValue();
      // Skip field 0, whose offset is always 0.
      if (Field != 0) {
        NeedsExtraction = true;
        AccumulativeByteOffset +=
            APInt(IdxWidth, DL->getStructLayout(StTy)->getElementOffset(Field),
                  /*isSigned=*/true);
      }
    }
  }
  return AccumulativeByteOffset;
}
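// ---------------------------------------------------------------------------
// Standalone illustration (not part of the pass) of the accumulation rule
// above: the constant part of a sequential index is scaled by the element
// stride, and a struct field operand contributes its layout offset. The
// struct layout and index values are assumptions made for the example.
#include <cstdint>

static uint64_t exampleAccumulatedByteOffset() {
  // Assume a GEP equivalent to &S[i + 3].B with
  //   struct S { int64_t A; int32_t B; };  // sizeof(S) == 16, B at offset 8
  const uint64_t StructStride = 16; // stride of the sequential index
  const uint64_t ConstIdxPart = 3;  // constant split out of (i + 3)
  const uint64_t FieldOffsetB = 8;  // byte offset of field B
  return ConstIdxPart * StructStride + FieldOffsetB; // 3*16 + 8 = 56 bytes
}
// ---------------------------------------------------------------------------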
void SeparateConstOffsetFromGEP::lowerToSingleIndexGEPs(
    GetElementPtrInst *Variadic, const APInt &AccumulativeByteOffset) {
  IRBuilder<> Builder(Variadic);
  Type *PtrIndexTy = DL->getIndexType(Variadic->getType());

  Value *ResultPtr = Variadic->getOperand(0);
  Loop *L = LI->getLoopFor(Variadic->getParent());
  // Swapping is only worthwhile if the base is loop invariant and has no
  // other use inside the loop.
  bool isSwapCandidate =
      L && L->isLoopInvariant(ResultPtr) &&
      !hasMoreThanOneUseInLoop(ResultPtr, L);
  Value *FirstResult = nullptr;

  gep_type_iterator GTI = gep_type_begin(*Variadic);
  // Create an ugly GEP for each sequential index; structure indices have
  // already been folded into the accumulated constant offset.
  for (unsigned I = 1, E = Variadic->getNumOperands(); I != E; ++I, ++GTI) {
    if (GTI.isSequential()) {
      Value *Idx = Variadic->getOperand(I);
      // Skip zero indices.
      if (ConstantInt *CI = dyn_cast<ConstantInt>(Idx))
        if (CI->isZero())
          continue;

      APInt ElementSize = APInt(PtrIndexTy->getIntegerBitWidth(),
                                GTI.getSequentialElementStride(*DL));
      // Scale the index by the element size, preferring a shift when the
      // size is a power of two.
      if (ElementSize != 1) {
        if (ElementSize.isPowerOf2()) {
          Idx = Builder.CreateShl(
              Idx, ConstantInt::get(PtrIndexTy, ElementSize.logBase2()));
        } else {
          Idx =
              Builder.CreateMul(Idx, ConstantInt::get(PtrIndexTy, ElementSize));
        }
      }
      // Create an ugly GEP with a single index for each index.
      ResultPtr = Builder.CreatePtrAdd(ResultPtr, Idx, "uglygep");
      if (FirstResult == nullptr)
        FirstResult = ResultPtr;
    }
  }

  // Create a GEP with the constant offset index.
  if (AccumulativeByteOffset != 0) {
    Value *Offset = ConstantInt::get(PtrIndexTy, AccumulativeByteOffset);
    ResultPtr = Builder.CreatePtrAdd(ResultPtr, Offset, "uglygep");
  } else
    isSwapCandidate = false;

  // If we created a GEP with a constant index and the base is loop invariant,
  // swap it with the first GEP so LICM can hoist the constant GEP.
  auto *FirstGEP = dyn_cast_or_null<GetElementPtrInst>(FirstResult);
  auto *SecondGEP = dyn_cast<GetElementPtrInst>(ResultPtr);
  if (isSwapCandidate && isLegalToSwapOperand(FirstGEP, SecondGEP, L))
    swapGEPOperand(FirstGEP, SecondGEP);

  Variadic->replaceAllUsesWith(ResultPtr);
  Variadic->eraseFromParent();
}
bool SeparateConstOffsetFromGEP::reorderGEP(GetElementPtrInst *GEP,
                                            TargetTransformInfo &TTI) {
  auto *PtrGEP = dyn_cast<GetElementPtrInst>(GEP->getPointerOperand());
  if (!PtrGEP)
    return false;

  bool NestedNeedsExtraction;
  APInt NestedByteOffset = accumulateByteOffset(PtrGEP, NestedNeedsExtraction);
  if (!NestedNeedsExtraction)
    return false;

  // Reordering only pays off if the nested constant offset fits the target's
  // addressing modes.
  unsigned AddrSpace = PtrGEP->getPointerAddressSpace();
  if (!TTI.isLegalAddressingMode(GEP->getResultElementType(),
                                 /*BaseGV=*/nullptr,
                                 NestedByteOffset.getSExtValue(),
                                 /*HasBaseReg=*/true, /*Scale=*/0, AddrSpace))
    return false;

  bool GEPInBounds = GEP->isInBounds();
  bool PtrGEPInBounds = PtrGEP->isInBounds();
  bool IsChainInBounds = GEPInBounds && PtrGEPInBounds;
  if (IsChainInBounds) {
    auto IsKnownNonNegative = [this](Value *V) {
      return isKnownNonNegative(V, SimplifyQuery(*DL));
    };
    IsChainInBounds &= all_of(GEP->indices(), IsKnownNonNegative);
    if (IsChainInBounds)
      IsChainInBounds &= all_of(PtrGEP->indices(), IsKnownNonNegative);
  }

  IRBuilder<> Builder(GEP);
  // For trivial GEP chains, we can simply swap the order of the two GEPs.
  Value *NewSrc = Builder.CreateGEP(
      GEP->getSourceElementType(), PtrGEP->getPointerOperand(),
      SmallVector<Value *, 4>(GEP->indices()), "", IsChainInBounds);
  Value *NewGEP = Builder.CreateGEP(PtrGEP->getSourceElementType(), NewSrc,
                                    SmallVector<Value *, 4>(PtrGEP->indices()),
                                    "", IsChainInBounds);
  GEP->replaceAllUsesWith(NewGEP);
  GEP->eraseFromParent();
  return true;
}
bool SeparateConstOffsetFromGEP::splitGEP(GetElementPtrInst *GEP) {
  // Skip vector GEPs.
  if (GEP->getType()->isVectorTy())
    return false;

  // If the pointer operand is itself a pointer add with a constant offset,
  // that base offset can be folded into the accumulated byte offset too.
  Value *NewBase;
  const APInt *BaseOffset;
  const bool ExtractBase =
      // ... (true when the pointer operand is a constant-offset pointer add)
  unsigned IdxWidth = DL->getIndexTypeSizeInBits(GEP->getType());
  const APInt BaseByteOffset =
      ExtractBase ? BaseOffset->sextOrTrunc(IdxWidth) : APInt(IdxWidth, 0);

  // The backend can already handle GEPs whose indices are all constant.
  if (GEP->hasAllConstantIndices() && !ExtractBase)
    return false;

  bool Changed = canonicalizeArrayIndicesToIndexSize(GEP);

  bool NeedsExtraction;
  APInt AccumulativeByteOffset =
      BaseByteOffset + accumulateByteOffset(GEP, NeedsExtraction);

  TargetTransformInfo &TTI = GetTTI(*GEP->getFunction());

  if (!NeedsExtraction && !ExtractBase) {
    Changed |= reorderGEP(GEP, TTI);
    return Changed;
  }

  // Before really splitting the GEP, check whether the backend supports the
  // resulting addressing mode; otherwise the split may be a pessimization.
  if (!LowerGEP) {
    unsigned AddrSpace = GEP->getPointerAddressSpace();
    if (!TTI.isLegalAddressingMode(GEP->getResultElementType(),
                                   /*BaseGV=*/nullptr,
                                   AccumulativeByteOffset.getSExtValue(),
                                   /*HasBaseReg=*/true, /*Scale=*/0,
                                   AddrSpace)) {
      return Changed;
    }
  }

  // Track which no-wrap guarantees can legally survive the rewrite.
  bool AllOffsetsNonNegative = AccumulativeByteOffset.isNonNegative();
  bool AllNUWPreserved = GEP->hasNoUnsignedWrap();
  bool NewGEPInBounds = GEP->isInBounds();
  bool NewGEPNUSW = GEP->hasNoUnsignedSignedWrap();

  // Remove the constant offset in each sequential index; the resulting GEP
  // computes the variadic base.
  gep_type_iterator GTI = gep_type_begin(*GEP);
  for (unsigned I = 1, E = GEP->getNumOperands(); I != E; ++I, ++GTI) {
    if (GTI.isSequential()) {
      Value *Idx = GEP->getOperand(I);
      User *UserChainTail;
      bool PreservesNUW;
      Value *NewIdx = ConstantOffsetExtractor::Extract(Idx, GEP, UserChainTail,
                                                       PreservesNUW);
      if (NewIdx != nullptr) {
        // Switch to the index with the constant offset removed.
        GEP->setOperand(I, NewIdx);
        // ...
      }
      AllNUWPreserved &= PreservesNUW;
      AllOffsetsNonNegative =
          // ... (every remaining index must also be known non-negative)
    }
  }

  // Fold the base pointer's constant offset, if one was matched above.
  if (ExtractBase) {
    // ...
    AllNUWPreserved &= Base->hasNoUnsignedWrap();
    NewGEPInBounds &= Base->isInBounds();
    NewGEPNUSW &= Base->hasNoUnsignedSignedWrap();
    GEP->setOperand(0, NewBase);
    // ...
  }

  // Decide which no-wrap flags the rewritten GEPs may carry.
  bool CanPreserveInBoundsNUSW = AllOffsetsNonNegative;
  // ...
  if (AllNUWPreserved) {
    // ...
    CanPreserveInBoundsNUSW |= NewGEPNUSW;
    // ...
  }

  GEPNoWrapFlags NewGEPFlags = GEPNoWrapFlags::none();
  if (CanPreserveInBoundsNUSW) {
    // ...
  } else if (NewGEPNUSW) {
    // ...
  }
  // ...
  GEP->setNoWrapFlags(NewGEPFlags);

  // Lower the variadic part to single-index GEPs if the target asked for it.
  if (LowerGEP) {
    lowerToSingleIndexGEPs(GEP, AccumulativeByteOffset);
    return true;
  }

  // No need to create another GEP if the accumulated offset is zero.
  if (AccumulativeByteOffset == 0)
    return Changed;

  // Otherwise emit the variadic-base GEP followed by a pointer add of the
  // accumulated constant offset.
  // ...
  Type *PtrIdxTy = DL->getIndexType(GEP->getType());
  // ...
  NewGEP = cast<Instruction>(Builder.CreatePtrAdd(
      NewGEP, ConstantInt::get(PtrIdxTy, AccumulativeByteOffset),
      GEP->getName(), NewGEPFlags));
  // ...
  GEP->replaceAllUsesWith(NewGEP);
  GEP->eraseFromParent();

  return true;
}
bool SeparateConstOffsetFromGEPLegacyPass::runOnFunction(Function &F) {
  if (skipFunction(F))
    return false;
  auto *DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  auto *LI = &getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
  auto *TLI = &getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
  auto GetTTI = [this](Function &F) -> TargetTransformInfo & {
    return this->getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
  };
  SeparateConstOffsetFromGEP Impl(DT, LI, TLI, GetTTI, LowerGEP);
  return Impl.run(F);
}
bool SeparateConstOffsetFromGEP::run(Function &F) {
  if (DisableSeparateConstOffsetFromGEP)
    return false;

  DL = &F.getDataLayout();
  bool Changed = false;

  // Visit blocks in reverse post-order so a GEP's base is processed before
  // the GEP itself.
  ReversePostOrderTraversal<Function *> RPOT(&F);
  for (BasicBlock *B : RPOT) {
    if (!DT->isReachableFromEntry(B))
      continue;
    for (Instruction &I : llvm::make_early_inc_range(*B))
      if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(&I))
        Changed |= splitGEP(GEP);
  }

  Changed |= reuniteExts(F);
  if (VerifyNoDeadCode)
    verifyNoDeadCode(F);
  return Changed;
}
Instruction *SeparateConstOffsetFromGEP::findClosestMatchingDominator(
    ExprKey Key, Instruction *Dominatee,
    DenseMap<ExprKey, SmallVector<Instruction *, 2>> &DominatingExprs) {
  auto Pos = DominatingExprs.find(Key);
  if (Pos == DominatingExprs.end())
    return nullptr;

  auto &Candidates = Pos->second;
  // Because blocks are processed in pre-order of the dominator tree, a
  // candidate that does not dominate the current instruction cannot dominate
  // any later instruction either, so it can be popped for good.
  while (!Candidates.empty()) {
    Instruction *Candidate = Candidates.back();
    if (DT->dominates(Candidate, Dominatee))
      return Candidate;
    Candidates.pop_back();
  }
  return nullptr;
}
bool SeparateConstOffsetFromGEP::reuniteExts(Instruction *I) {
  if (!I->getType()->isIntOrIntVectorTy())
    return false;

  //   Dom: LHS + RHS
  //   I:   sext(LHS) + sext(RHS)
  // If Dom cannot sign overflow and dominates I, rewrite I as sext(Dom).
  Value *LHS = nullptr, *RHS = nullptr;
  if (match(I, m_Add(m_SExt(m_Value(LHS)), m_SExt(m_Value(RHS))))) {
    if (LHS->getType() == RHS->getType()) {
      ExprKey Key = createNormalizedCommutablePair(LHS, RHS);
      if (auto *Dom = findClosestMatchingDominator(Key, I, DominatingAdds)) {
        Instruction *NewSExt =
            new SExtInst(Dom, I->getType(), "", I->getIterator());
        NewSExt->takeName(I);
        I->replaceAllUsesWith(NewSExt);
        NewSExt->setDebugLoc(I->getDebugLoc());
        RecursivelyDeleteTriviallyDeadInstructions(I);
        return true;
      }
    }
  } else if (match(I, m_Sub(m_SExt(m_Value(LHS)), m_SExt(m_Value(RHS))))) {
    if (LHS->getType() == RHS->getType()) {
      if (auto *Dom =
              findClosestMatchingDominator({LHS, RHS}, I, DominatingSubs)) {
        Instruction *NewSExt =
            new SExtInst(Dom, I->getType(), "", I->getIterator());
        NewSExt->takeName(I);
        I->replaceAllUsesWith(NewSExt);
        NewSExt->setDebugLoc(I->getDebugLoc());
        RecursivelyDeleteTriviallyDeadInstructions(I);
        return true;
      }
    }
  }

  // Record I as a dominating expression if it is an add/sub that cannot sign
  // overflow.
  if (match(I, m_NSWAdd(m_Value(LHS), m_Value(RHS)))) {
    if (programUndefinedIfPoison(I)) {
      ExprKey Key = createNormalizedCommutablePair(LHS, RHS);
      DominatingAdds[Key].push_back(I);
    }
  } else if (match(I, m_NSWSub(m_Value(LHS), m_Value(RHS)))) {
    if (programUndefinedIfPoison(I))
      DominatingSubs[{LHS, RHS}].push_back(I);
  }
  return false;
}
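// ---------------------------------------------------------------------------
// Standalone illustration (not part of the pass) of the identity exploited by
// reuniteExts: when a + b cannot sign-overflow in the narrow type,
// sext(a) + sext(b) equals sext(a + b), so a sign-extended sum can simply
// reuse a dominating narrow add. The concrete values are assumptions made for
// the example.
#include <cstdint>

static bool sextAddIdentityHolds() {
  int8_t A = 57, B = 42;                         // 57 + 42 = 99 still fits in int8_t
  int32_t WideSum = int32_t(A) + int32_t(B);     // sext(a) + sext(b)
  int32_t SumThenSext = int32_t(int8_t(A + B));  // sext(a + b)
  return WideSum == SumThenSext;                 // true while the narrow add does not overflow
}
// ---------------------------------------------------------------------------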
bool SeparateConstOffsetFromGEP::reuniteExts(Function &F) {
  bool Changed = false;
  DominatingAdds.clear();
  DominatingSubs.clear();
  for (const auto Node : depth_first(DT)) {
    BasicBlock *BB = Node->getBlock();
    for (Instruction &I : llvm::make_early_inc_range(*BB))
      Changed |= reuniteExts(&I);
  }
  return Changed;
}
void SeparateConstOffsetFromGEP::verifyNoDeadCode(Function &F) {
  for (BasicBlock &B : F) {
    for (Instruction &I : B) {
      if (isInstructionTriviallyDead(&I)) {
        std::string ErrMessage;
        raw_string_ostream RSO(ErrMessage);
        RSO << "Dead instruction detected!\n" << I << "\n";
        llvm_unreachable(RSO.str().c_str());
      }
    }
  }
}
bool SeparateConstOffsetFromGEP::isLegalToSwapOperand(
    GetElementPtrInst *FirstGEP, GetElementPtrInst *SecondGEP, Loop *CurLoop) {
  if (!FirstGEP || !FirstGEP->hasOneUse())
    return false;
  // ...
  if (FirstGEP == SecondGEP)
    return false;
  // ...
  if (FirstNum != SecondNum || FirstNum != 2)
    return false;
  // ...
  // Skip a constant shift possibly generated while splitting GEPs.
  if (FirstOffsetDef && FirstOffsetDef->isShift() &&
      isa<ConstantInt>(FirstOffsetDef->getOperand(1)))
    FirstOffsetDef = dyn_cast<Instruction>(FirstOffsetDef->getOperand(0));
  // ...
  // Not profitable if the first index is an add/sub with a constant operand.
  if ((opc == Instruction::Add || opc == Instruction::Sub) &&
      (isa<ConstantInt>(BO->getOperand(0)) ||
       isa<ConstantInt>(BO->getOperand(1))))
    return false;
  // ...
  return true;
}
bool SeparateConstOffsetFromGEP::hasMoreThanOneUseInLoop(Value *V, Loop *L) {
  int UsesInLoop = 0;
  for (User *U : V->users()) {
    if (Instruction *User = dyn_cast<Instruction>(U))
      if (L->contains(User))
        if (++UsesInLoop > 1)
          return true;
  }
  return false;
}
void SeparateConstOffsetFromGEP::swapGEPOperand(GetElementPtrInst *First,
                                                GetElementPtrInst *Second) {
  Value *Offset1 = First->getOperand(1);
  Value *Offset2 = Second->getOperand(1);
  First->setOperand(1, Offset2);
  Second->setOperand(1, Offset1);

  // We changed p+o+c to p+c+o, and p+c may no longer be inbounds.
  const DataLayout &DAL = First->getDataLayout();
  APInt Offset(DAL.getIndexSizeInBits(First->getPointerAddressSpace()), 0);
  Value *NewBase =
      First->stripAndAccumulateInBoundsConstantOffsets(DAL, Offset);
  uint64_t ObjectSize;
  if (!getObjectSize(NewBase, ObjectSize, DAL, TLI) ||
      Offset.ugt(ObjectSize)) {
    // The new intermediate pointer may step outside the underlying object:
    // drop the no-wrap guarantees on both GEPs.
    // ...
  } else
    First->setIsInBounds(true);
}
// New pass manager entry point.
PreservedAnalyses
SeparateConstOffsetFromGEPPass::run(Function &F, FunctionAnalysisManager &AM) {
  // ... (obtain DT, LI, TLI and a TTI callback from AM)
  SeparateConstOffsetFromGEP Impl(DT, LI, TLI, GetTTI, LowerGEP);
  // ...
}