#define DEBUG_TYPE "gvn-sink"

STATISTIC(NumRemoved, "Number of instructions removed");

namespace GVNExpression {
// ...
}
// isMemoryInst() - used below by getMemoryUseOrder() and
// analyzeInstructionForSinking(): true if the instruction may read or write
// memory.
static bool isMemoryInst(const Instruction *I) {
  return isa<LoadInst>(I) || isa<StoreInst>(I) ||
         (isa<InvokeInst>(I) && !cast<InvokeInst>(I)->doesNotAccessMemory()) ||
         (isa<CallInst>(I) && !cast<CallInst>(I)->doesNotAccessMemory());
}
class LockstepReverseIterator {
  // ...

    ActiveBlocks.clear();
    // ...
      if (BB->size() <= 1) {
        // ...
      }
      Insts.push_back(BB->getTerminator()->getPrevNonDebugInstruction());
    // ...

      if (!Blocks.contains((*II)->getParent())) {
        ActiveBlocks.remove((*II)->getParent());
        // ...
      }
    // ...

    for (auto *Inst : Insts) {
      if (Inst == &Inst->getParent()->front())
        ActiveBlocks.remove(Inst->getParent());
      else
        NewInsts.push_back(Inst->getPrevNonDebugInstruction());
    }
    if (NewInsts.empty()) {
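// LockstepReverseIterator, as inferred from the fragments above: it starts
// at the last non-debug instruction before each candidate block's
// terminator and steps all blocks backwards together, dropping a block from
// ActiveBlocks once its instructions are exhausted and apparently failing
// when no block has any instructions left.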
struct SinkingInstructionCandidate {
  // ...
  unsigned NumInstructions;
  // ...
  unsigned NumMemoryInsts;
  // ...

  void calculateCost(unsigned NumOrigPHIs, unsigned NumOrigBlocks) {
    unsigned NumExtraPHIs = NumPHIs - NumOrigPHIs;
    unsigned SplitEdgeCost = (NumOrigBlocks > NumBlocks) ? 2 : 0;
    Cost = (NumInstructions * (NumBlocks - 1)) -
           (NumExtraPHIs * NumExtraPHIs) - SplitEdgeCost;
  }
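// A worked example of the cost heuristic above (illustrative numbers, not
// taken from the pass; it assumes the quadratic NumExtraPHIs term completed
// above): sinking 3 instructions shared by 2 blocks while needing 1 extra
// PHI and no edge split gives
//   Cost = 3 * (2 - 1) - 1 * 1 - 0 = 2,
// a net win, whereas sinking 1 instruction that needs 2 extra PHIs plus an
// edge split gives
//   Cost = 1 * (2 - 1) - 2 * 2 - 2 = -5,
// which sinkBB() later rejects, since it only proceeds when Cost > 0.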
  OS << "<Candidate Cost=" << C.Cost << " #Blocks=" << C.NumBlocks
     << " #Insts=" << C.NumInstructions << " #PHIs=" << C.NumPHIs << ">";
  ModelledPHI() = default;

  using OpsType = std::pair<BasicBlock *, Value *>;
  // ...
    auto ComesBefore = [BlockOrder](OpsType O1, OpsType O2) {
      return BlockOrder.lookup(O1.first) < BlockOrder.lookup(O2.first);
    };
    // ...
    for (auto &P : Ops) {
      // ...
    }

  static ModelledPHI createDummy(size_t ID) {
    // ...
    M.Values.push_back(reinterpret_cast<Value *>(ID));
    // ...
  }

    assert(/* ... */ &&
           "Modelling PHI with less than 2 values");
    auto ComesBefore = [BlockOrder](const BasicBlock *BB1,
    // ...
    for (const Value *V : Values) {
      if (!isa<UndefValue>(V)) {
        // ...

    verifyModelledPHI(BlockOrder);

    for (auto *I : Insts)
      Values.push_back(I->getOperand(OpNum));

    auto VI = Values.begin();
    while (BI != Blocks.end()) {
      assert(VI != Values.end());
      // ...
        VI = Values.erase(VI);
      // ...

  bool areAllIncomingValuesSame() const {
    // ...
  bool areAllIncomingValuesSameType() const {
    return all_of(Values, [&](Value *V) {
      return V->getType() == Values[0]->getType();
    });
  }
  bool areAnyIncomingValuesConstant() const {
    // ...
  unsigned hash() const {
    // ...

// DenseMapInfo support for ModelledPHI:
  static inline ModelledPHI &getEmptyKey() {
    static ModelledPHI Dummy = ModelledPHI::createDummy(0);
    // ...
  }
  static inline ModelledPHI &getTombstoneKey() {
    static ModelledPHI Dummy = ModelledPHI::createDummy(1);
    // ...
  }
  static unsigned getHashValue(const ModelledPHI &V) { return V.hash(); }
  static bool isEqual(const ModelledPHI &LHS, const ModelledPHI &RHS) {
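// A minimal sketch (not the pass's code) of the hashing scheme the
// DenseMapInfo support above relies on: ModelledPHI::hash() (body elided
// above) presumably collapses the incoming values into one hash_code, which
// getHashValue()/isEqual() turn into a usable DenseMap key.  The helper name
// below is made up for illustration; only hash_combine_range is real LLVM API.
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Hashing.h"
#include "llvm/IR/Value.h"

static unsigned hashIncomingValues(llvm::ArrayRef<llvm::Value *> Values) {
  // Structurally identical PHIs (same values in the same block order) hash
  // identically, so they collapse to a single set entry.
  return (unsigned)llvm::hash_combine_range(Values.begin(), Values.end());
}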
class InstructionUseExpr : public GVNExpression::BasicExpression {
  unsigned MemoryUseOrder = -1;
  // ...

      : GVNExpression::BasicExpression(I->getNumUses()) {
    // ...
      ShuffleMask = SVI->getShuffleMask().copy(A);
    // ...
    for (auto &U : I->uses())
      // ...

  void setMemoryUseOrder(unsigned MUO) { MemoryUseOrder = MUO; }
  void setVolatile(bool V) { Volatile = V; }

    // (arguments folded into the expression's hash)
                        MemoryUseOrder, Volatile, ShuffleMask);
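// InstructionUseExpr, as far as these fragments show, models an instruction
// by its *uses*: the expression is sized by I->getNumUses() and filled by
// walking I->uses(), and its hash additionally mixes in MemoryUseOrder,
// Volatile and any ShuffleMask, so instructions only share a value number
// when their consumers and memory behaviour agree.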
  BasicBlocksSet ReachableBBs;
  // ...

    InstructionUseExpr *E =
        // ...
    E->setMemoryUseOrder(getMemoryUseOrder(I));
    // ...
      E->setOpcode((C->getOpcode() << 8) | Predicate);
    // ...

  template <class Inst> InstructionUseExpr *createMemoryExpr(Inst *I) {
    // ...
    InstructionUseExpr *E = createExpr(I);
    E->setVolatile(I->isVolatile());
    // ...
  }

  void setReachableBBs(const BasicBlocksSet &ReachableBBs) {
    this->ReachableBBs = ReachableBBs;
  }
    auto VI = ValueNumbering.find(V);
    if (VI != ValueNumbering.end())
      // ...
    if (!isa<Instruction>(V)) {
      ValueNumbering[V] = nextValueNumber;
      return nextValueNumber++;
    }
    // ...
    if (!ReachableBBs.contains(I->getParent()))
      // ...

    InstructionUseExpr *exp = nullptr;
    switch (I->getOpcode()) {
    case Instruction::Load:
      exp = createMemoryExpr(cast<LoadInst>(I));
      break;
    case Instruction::Store:
      exp = createMemoryExpr(cast<StoreInst>(I));
      break;
    case Instruction::Call:
    case Instruction::Invoke:
    case Instruction::FNeg:
    case Instruction::Add:
    case Instruction::FAdd:
    case Instruction::Sub:
    case Instruction::FSub:
    case Instruction::Mul:
    case Instruction::FMul:
    case Instruction::UDiv:
    case Instruction::SDiv:
    case Instruction::FDiv:
    case Instruction::URem:
    case Instruction::SRem:
    case Instruction::FRem:
    case Instruction::Shl:
    case Instruction::LShr:
    case Instruction::AShr:
    case Instruction::And:
    case Instruction::Or:
    case Instruction::Xor:
    case Instruction::ICmp:
    case Instruction::FCmp:
    case Instruction::Trunc:
    case Instruction::ZExt:
    case Instruction::SExt:
    case Instruction::FPToUI:
    case Instruction::FPToSI:
    case Instruction::UIToFP:
    case Instruction::SIToFP:
    case Instruction::FPTrunc:
    case Instruction::FPExt:
    case Instruction::PtrToInt:
    case Instruction::IntToPtr:
    case Instruction::BitCast:
    case Instruction::AddrSpaceCast:
    case Instruction::Select:
    case Instruction::ExtractElement:
    case Instruction::InsertElement:
    case Instruction::ShuffleVector:
    case Instruction::InsertValue:
    case Instruction::GetElementPtr:
      // ...
    }
    // ...
      ValueNumbering[V] = nextValueNumber;
      return nextValueNumber++;
    // ...

    hash_code H = exp->getHashValue([=](Value *V) { return lookupOrAdd(V); });
    // ...
      ExpressionNumbering[exp] = nextValueNumber++;
    // ...
    ValueNumbering[V] = e;
    auto VI = ValueNumbering.find(V);
    assert(VI != ValueNumbering.end() && "Value not numbered?");
    // ...

    ValueNumbering.clear();
    ExpressionNumbering.clear();
    HashNumbering.clear();
    for (auto I = std::next(Inst->getIterator()), E = BB->end();
         I != E && !I->isTerminator(); ++I) {
      if (!isMemoryInst(&*I))
        continue;
      if (isa<LoadInst>(&*I))
        continue;
      // ...
      if (II && II->onlyReadsMemory())
        continue;
      return lookupOrAdd(&*I);
    }
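// Reading of the loop above: starting just after Inst, skip everything that
// cannot write memory (non-memory instructions, loads, and calls/invokes
// that only read), and return the value number of the first instruction that
// may write.  That number becomes the expression's MemoryUseOrder (see
// setMemoryUseOrder() above), so memory instructions only value-number
// together when the downstream memory state they see matches.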
  unsigned NumSunk = 0;
  // ...
  VN.setReachableBBs(BasicBlocksSet(RPOT.begin(), RPOT.end()));
  // ...
  unsigned NodeOrdering = 0;
  RPOTOrder[*RPOT.begin()] = ++NodeOrdering;
  for (auto *BB : RPOT)
    // ...
      RPOTOrder[BB] = ++NodeOrdering;
  // ...
    NumSunk += sinkBB(N);

    if (isa<PHINode>(I) || I->isEHPad() || isa<AllocaInst>(I) ||
        I->getType()->isTokenTy())
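// The condition above appears to be the guard in
// shouldAvoidSinkingInstruction() (the helper invoked further down when
// vetting NewInsts): PHIs, EH pads, allocas and token-typed instructions are
// never candidates for sinking.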
  std::optional<SinkingInstructionCandidate> analyzeInstructionForSinking(
      LockstepReverseIterator &LRI, unsigned &InstNum, unsigned &MemoryInstNum,
      // ...

  void analyzeInitialPHIs(BasicBlock *BB, ModelledPHISet &PHIs,
      // ...
      auto MPHI = ModelledPHI(&PN, RPOTOrder);
      // ...
      for (auto *V : MPHI.getValues())
        // ...

    while (PHINode *PN = dyn_cast<PHINode>(I++)) {
      // ...
        return V == PN->getIncomingValue(0);
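// The two lines above look like the core of foldPointlessPHINodes() (called
// from sinkLastInstruction below): walk the PHIs at the top of the block and
// treat a PHI as pointless when every incoming value equals its first
// incoming value, in which case the PHI can be replaced outright.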
std::optional<SinkingInstructionCandidate>
GVNSink::analyzeInstructionForSinking(LockstepReverseIterator &LRI,
                                      unsigned &InstNum,
                                      unsigned &MemoryInstNum,
                                      ModelledPHISet &NeededPHIs,
                                      // ...

  LLVM_DEBUG(dbgs() << " -- Analyzing instruction set: [\n";
             for (auto *I : Insts) {
               // ...
             }
             dbgs() << " ]\n";);
  for (auto *I : Insts) {
    // ...
  }
  // ...
  if (VNums[VNumToSink] == 1)
    // ...

  auto &ActivePreds = LRI.getActiveBlocks();
  unsigned InitialActivePredSize = ActivePreds.size();
  // ...
  for (auto *I : Insts) {
    if (VN.lookup(I) != VNumToSink)
      ActivePreds.remove(I->getParent());
    else
      NewInsts.push_back(I);
  }
  // ...
  for (auto *I : NewInsts)
    if (shouldAvoidSinkingInstruction(I))
      // ...

  bool RecomputePHIContents = false;
  if (ActivePreds.size() != InitialActivePredSize) {
    ModelledPHISet NewNeededPHIs;
    for (auto P : NeededPHIs) {
      P.restrictToBlocks(ActivePreds);
      NewNeededPHIs.insert(P);
    }
    NeededPHIs = NewNeededPHIs;
    LRI.restrictToBlocks(ActivePreds);
    RecomputePHIContents = true;
  }
  // ...
  ModelledPHI NewPHI(NewInsts, ActivePreds, RPOTOrder);
  // ...
  if (NeededPHIs.erase(NewPHI))
    RecomputePHIContents = true;

  if (RecomputePHIContents) {
    // ...
    for (auto &PHI : NeededPHIs)
      PHIContents.insert(PHI.getValues().begin(), PHI.getValues().end());
  }
  // ...
  for (auto *V : NewPHI.getValues())
    if (PHIContents.count(V))
      // ...

  if (any_of(NewInsts, isNotSameOperation))
    // ...

  for (unsigned OpNum = 0, E = I0->getNumOperands(); OpNum != E; ++OpNum) {
    ModelledPHI PHI(NewInsts, OpNum, ActivePreds);
    if (PHI.areAllIncomingValuesSame())
      // ...
    if (NeededPHIs.count(PHI))
      // ...
    if (!PHI.areAllIncomingValuesSameType())
      // ...
    if ((isa<CallInst>(I0) || isa<InvokeInst>(I0)) && OpNum == E - 1 &&
        PHI.areAnyIncomingValuesConstant())
      // ...

    NeededPHIs.reserve(NeededPHIs.size());
    NeededPHIs.insert(PHI);
    PHIContents.insert(PHI.getValues().begin(), PHI.getValues().end());
  }

  if (isMemoryInst(NewInsts[0]))
    // ...

  SinkingInstructionCandidate Cand;
  Cand.NumInstructions = ++InstNum;
  Cand.NumMemoryInsts = MemoryInstNum;
  Cand.NumBlocks = ActivePreds.size();
  Cand.NumPHIs = NeededPHIs.size();
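// Net effect of the routine above, as far as the fragments show: settle on a
// value number to sink (VNumToSink), drop predecessors whose bottom
// instruction does not match it, bail out on anything
// shouldAvoidSinkingInstruction() rejects, model the PHIs the sink would
// need (updating NeededPHIs/PHIContents), and record the block, instruction
// and PHI counts in a SinkingInstructionCandidate for costing.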
    auto *T = B->getTerminator();
    if (isa<BranchInst>(T) || isa<SwitchInst>(T))
      // ...

  if (Preds.size() < 2)
    // ...
  // ...
  unsigned NumOrigPreds = Preds.size();
  // ...
  LockstepReverseIterator LRI(Preds);
  // ...
  unsigned InstNum = 0, MemoryInstNum = 0;
  ModelledPHISet NeededPHIs;
  // ...
  analyzeInitialPHIs(BBEnd, NeededPHIs, PHIContents);
  unsigned NumOrigPHIs = NeededPHIs.size();
  // ...
  while (LRI.isValid()) {
    auto Cand = analyzeInstructionForSinking(LRI, InstNum, MemoryInstNum,
                                             NeededPHIs, PHIContents);
    // ...
    Cand->calculateCost(NumOrigPHIs, Preds.size());
    // ...
  }

  // ... (tail of an LLVM_DEBUG dump of the candidates)
                    << " " << C << "\n";);

  if (Candidates.empty() || Candidates.front().Cost <= 0)
    // ...
  auto C = Candidates.front();
  // ...
  if (C.Blocks.size() < NumOrigPreds) {
    // ...
  }
  // ...
  for (unsigned I = 0; I < C.NumInstructions; ++I)
    // ...

  return C.NumInstructions;
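// Taken together, sinkBB() walks the candidate block's predecessors
// backwards in lockstep, collects one SinkingInstructionCandidate per step,
// costs each against the PHIs already present, and then commits only the
// best candidate and only when its Cost is positive; the
// C.Blocks.size() < NumOrigPreds check above handles the case where the
// winning candidate covers just a subset of the predecessors, which is what
// the SplitEdgeCost term in calculateCost() prices in.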
    assert(!Op->getType()->isTokenTy() && "Can't PHI tokens!");
    // ...
    for (auto *I : Insts)
      // ...

  // ...
  for (auto *I : Insts)
    // ...

  for (auto *I : Insts)
    if (I != I0)
      I->replaceAllUsesWith(I0);
  foldPointlessPHINodes(BBEnd);

  // ...
  for (auto *I : Insts)
    if (I != I0)
      I->eraseFromParent();

  NumRemoved += Insts.size() - 1;
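// sinkLastInstruction() finishes the job, judging by the fragments above:
// operands that differ across the sunk copies get a PHI in the successor
// block (hence the "Can't PHI tokens!" assert), one representative
// instruction (I0) survives, the remaining copies are RAUW'd to I0 and
// erased, and NumRemoved counts the Insts.size() - 1 deleted duplicates.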