19 if (!isa<MemDGNode>(N)) {
20 assert(OpIt != OpItE &&
"Can't dereference end iterator!");
28 assert(MemIt != cast<MemDGNode>(N)->MemPreds.end() &&
29 "Cant' dereference end iterator!");
35 if (!isa<MemDGNode>(N)) {
36 assert(OpIt != OpItE &&
"Already at end!");
51 assert(MemIt != cast<MemDGNode>(N)->MemPreds.end() &&
"Already at end!");
57 assert(DAG ==
Other.DAG &&
"Iterators of different DAGs!");
58 assert(N ==
Other.N &&
"Iterators of different nodes!");
59 return OpIt ==
Other.OpIt && MemIt ==
Other.MemIt;
65 SB->eraseFromBundle(
this);
77 static constexpr const unsigned Indent = 4;
78 for (
auto *Pred : MemPreds)
79 OS.indent(Indent) <<
"<-" << *Pred->getInstruction() <<
"\n";
94 return cast<MemDGNode>(DAG.
getNode(
I));
104 I =
I->getPrevNode();
107 return cast<MemDGNode>(DAG.
getNode(
I));
115 if (TopMemN ==
nullptr)
118 assert(BotMemN !=
nullptr &&
"TopMemN should be null too!");
123DependencyGraph::DependencyType
128 return DependencyType::ReadAfterWrite;
130 return DependencyType::WriteAfterWrite;
133 return DependencyType::WriteAfterRead;
135 if (isa<sandboxir::PHINode>(FromI) || isa<sandboxir::PHINode>(ToI))
136 return DependencyType::Control;
138 return DependencyType::Control;
141 return DependencyType::Other;
142 return DependencyType::None;
147 if (
auto *LI = dyn_cast<LoadInst>(
I))
148 return !LI->isUnordered();
149 if (
auto *SI = dyn_cast<StoreInst>(
I))
150 return !SI->isUnordered();
155 bool Is = IsOrdered(
I);
157 "An ordered instruction must be a MemDepCandidate!");
162 DependencyType DepType) {
163 std::optional<MemoryLocation> DstLocOpt =
169 "Expected a mem instr");
176 case DependencyType::ReadAfterWrite:
177 case DependencyType::WriteAfterWrite:
179 case DependencyType::WriteAfterRead:
186bool DependencyGraph::hasDep(Instruction *SrcI, Instruction *DstI) {
187 DependencyType RoughDepType = getRoughDepType(SrcI, DstI);
188 switch (RoughDepType) {
189 case DependencyType::ReadAfterWrite:
190 case DependencyType::WriteAfterWrite:
191 case DependencyType::WriteAfterRead:
192 return alias(SrcI, DstI, RoughDepType);
193 case DependencyType::Control:
199 case DependencyType::Other:
201 case DependencyType::None:
207void DependencyGraph::scanAndAddDeps(MemDGNode &DstN,
209 assert(isa<MemDGNode>(DstN) &&
210 "DstN is the mem dep destination, so it must be mem");
211 Instruction *DstI = DstN.getInstruction();
214 for (MemDGNode &SrcN :
reverse(SrcScanRange)) {
215 Instruction *SrcI = SrcN.getInstruction();
216 if (hasDep(SrcI, DstI))
217 DstN.addMemPred(&SrcN);
221void DependencyGraph::setDefUseUnscheduledSuccs(
230 for (Instruction &
I : NewInterval) {
231 for (Value *
Op :
I.operands()) {
232 auto *OpI = dyn_cast<Instruction>(
Op);
235 if (!NewInterval.contains(OpI))
240 ++OpN->UnscheduledSuccs;
245 bool NewIsAbove = DAGInterval.empty() || NewInterval.comesBefore(DAGInterval);
246 const auto &TopInterval = NewIsAbove ? NewInterval : DAGInterval;
247 const auto &BotInterval = NewIsAbove ? DAGInterval : NewInterval;
258 for (Instruction &BotI : BotInterval) {
261 if (BotN->scheduled())
263 for (Value *
Op : BotI.operands()) {
264 auto *OpI = dyn_cast<Instruction>(
Op);
267 if (!TopInterval.contains(OpI))
272 ++OpN->UnscheduledSuccs;
280 MemDGNode *LastMemN = dyn_cast<MemDGNode>(LastN);
284 if (
auto *MemN = dyn_cast<MemDGNode>(
N)) {
285 MemN->setPrevNode(LastMemN);
286 if (LastMemN !=
nullptr)
287 LastMemN->setNextNode(MemN);
292 if (!DAGInterval.empty()) {
293 bool NewIsAbove = NewInterval.comesBefore(DAGInterval);
294 const auto &TopInterval = NewIsAbove ? NewInterval : DAGInterval;
295 const auto &BotInterval = NewIsAbove ? DAGInterval : NewInterval;
300 assert((LinkTopN ==
nullptr || LinkBotN ==
nullptr ||
301 LinkTopN->comesBefore(LinkBotN)) &&
303 if (LinkTopN !=
nullptr && LinkBotN !=
nullptr) {
304 LinkTopN->setNextNode(LinkBotN);
305 LinkBotN->setPrevNode(LinkTopN);
310 auto UnionIntvl = DAGInterval.getUnionInterval(NewInterval);
315 if (ChainTopN !=
nullptr && ChainBotN !=
nullptr) {
316 for (
auto *
N = ChainTopN->getNextNode(), *LastN = ChainTopN;
N !=
nullptr;
317 LastN =
N,
N =
N->getNextNode()) {
318 assert(
N == LastN->getNextNode() &&
"Bad chain!");
319 assert(
N->getPrevNode() == LastN &&
"Bad chain!");
325 setDefUseUnscheduledSuccs(NewInterval);
328MemDGNode *DependencyGraph::getMemDGNodeBefore(DGNode *
N,
329 bool IncludingN)
const {
330 auto *
I =
N->getInstruction();
331 for (
auto *PrevI = IncludingN ?
I :
I->getPrevNode(); PrevI !=
nullptr;
332 PrevI = PrevI->getPrevNode()) {
334 if (PrevN ==
nullptr)
336 if (
auto *PrevMemN = dyn_cast<MemDGNode>(PrevN))
342MemDGNode *DependencyGraph::getMemDGNodeAfter(DGNode *
N,
343 bool IncludingN)
const {
344 auto *
I =
N->getInstruction();
345 for (
auto *NextI = IncludingN ?
I :
I->getNextNode(); NextI !=
nullptr;
346 NextI = NextI->getNextNode()) {
348 if (NextN ==
nullptr)
350 if (
auto *NextMemN = dyn_cast<MemDGNode>(NextN))
356void DependencyGraph::notifyCreateInstr(Instruction *
I) {
361 if (MemN !=
nullptr) {
362 if (
auto *PrevMemN = getMemDGNodeBefore(MemN,
false)) {
363 PrevMemN->NextMemN = MemN;
364 MemN->PrevMemN = PrevMemN;
366 if (
auto *NextMemN = getMemDGNodeAfter(MemN,
false)) {
367 NextMemN->PrevMemN = MemN;
368 MemN->NextMemN = NextMemN;
373void DependencyGraph::notifyEraseInstr(Instruction *
I) {
376 auto *PrevMemN = getMemDGNodeBefore(MemN,
false);
377 auto *NextMemN = getMemDGNodeAfter(MemN,
false);
378 if (PrevMemN !=
nullptr)
379 PrevMemN->NextMemN = NextMemN;
380 if (NextMemN !=
nullptr)
381 NextMemN->PrevMemN = PrevMemN;
384 InstrToNodeMap.erase(
I);
395 auto NewInterval = Union.getSingleDiff(DAGInterval);
396 if (NewInterval.empty())
399 createNewNodes(NewInterval);
415 if (!DstRange.empty()) {
418 scanAndAddDeps(DstN, SrcRange);
422 if (DAGInterval.empty()) {
423 assert(NewInterval == InstrsInterval &&
"Expected empty DAGInterval!");
424 FullScan(NewInterval);
441 else if (DAGInterval.bottom()->comesBefore(NewInterval.top())) {
444 DAGInterval.getUnionInterval(NewInterval), *
this);
448 scanAndAddDeps(DstN, SrcRange);
452 else if (NewInterval.bottom()->comesBefore(DAGInterval.top())) {
466 FullScan(NewInterval);
485 scanAndAddDeps(DstN, SrcRange);
498 Nodes.
reserve(InstrToNodeMap.size());
499 for (
const auto &Pair : InstrToNodeMap)
505 for (
auto *
N : Nodes)
std::pair< uint64_t, uint64_t > Interval
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
bool empty() const
empty - Check if the array is empty.
bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
This class implements an extremely fast bulk output stream that can only output to a stream.
A DependencyGraph Node that points to an Instruction and contains memory dependency edges.
virtual void print(raw_ostream &OS, bool PrintDeps=true) const
static bool isMemDepCandidate(Instruction *I)
We consider I as a Memory Dependency Candidate instruction if it reads/writes memory or if it has side...
unsigned UnscheduledSuccs
The number of unscheduled successors.
SchedBundle * SB
The scheduler bundle that this node belongs to.
bool Scheduled
This is true if this node has been scheduled.
static bool isMemDepNodeCandidate(Instruction *I)
\Returns true if I is a memory dependency candidate instruction.
static bool isFenceLike(Instruction *I)
\Returns true if I is fence like. It excludes non-mem intrinsics.
LLVM_DUMP_METHOD void dump() const
Instruction * getInstruction() const
static bool isStackSaveOrRestoreIntrinsic(Instruction *I)
LLVM_DUMP_METHOD void dump() const
DGNode * getNode(Instruction *I) const
DGNode * getNodeOrNull(Instruction *I) const
Like getNode() but returns nullptr if I is nullptr.
void print(raw_ostream &OS) const
DGNode * getOrCreateNode(Instruction *I)
Interval< Instruction > extend(ArrayRef< Instruction * > Instrs)
Build/extend the dependency graph such that it includes Instrs.
A sandboxir::User with operands, opcode and linked with previous/next instructions in an instruction ...
bool mayWriteToMemory() const
bool mayReadFromMemory() const
bool isTerminator() const
static MemDGNode * getBotMemDGNode(const Interval< Instruction > &Intvl, const DependencyGraph &DAG)
Scans the instruction chain in Intvl bottom-up, returning the bottom-most MemDGNode,...
static MemDGNode * getTopMemDGNode(const Interval< Instruction > &Intvl, const DependencyGraph &DAG)
Scans the instruction chain in Intvl top-down, returning the top-most MemDGNode, or nullptr.
static Interval< MemDGNode > make(const Interval< Instruction > &Instrs, DependencyGraph &DAG)
Given Instrs it finds their closest mem nodes in the interval and returns the corresponding mem range...
A DependencyGraph Node for instructions that may read/write memory, or have some ordering constraints...
virtual void print(raw_ostream &OS, bool PrintDeps=true) const override
Iterate over both def-use and mem dependencies.
PredIterator & operator++()
bool operator==(const PredIterator &Other) const
static ModRefInfo aliasAnalysisGetModRefInfo(BatchAAResults &BatchAA, const Instruction *I, const std::optional< MemoryLocation > &OptLoc)
Equivalent to BatchAA::getModRefInfo().
static std::optional< llvm::MemoryLocation > memoryLocationGetOrNone(const Instruction *I)
Equivalent to MemoryLocation::getOrNone(I).
A SandboxIR Value has users. This is the base class.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
static bool isOrdered(Instruction *I)
static User::op_iterator skipNonInstr(User::op_iterator OpIt, User::op_iterator OpItE)
While OpIt points to a Value that is not an Instruction keep incrementing it.
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
auto reverse(ContainerTy &&C)
bool isModSet(const ModRefInfo MRI)
void sort(IteratorTy Start, IteratorTy End)
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
ModRefInfo
Flags indicating whether a memory access modifies or references memory.
@ ModRef
The access may reference and may modify the value stored in memory.
DWARFExpression::Operation Op
bool isRefSet(const ModRefInfo MRI)