34 for (
auto *
N : Nodes) {
35 auto *
I =
N->getInstruction();
36 if (
I->getIterator() == Where)
57 while (!ListCopy.empty()) {
58 OS << *ListCopy.top() <<
"\n";
// Schedule every node of \p Bndl at the current top-of-schedule insertion
// point and propagate readiness to predecessors.
// NOTE(review): this is a fragment — lines are missing between statements
// (e.g. the declaration of `N` and the cluster/move of the bundle are not
// visible here); comments below describe only what is shown.
69void Scheduler::scheduleAndUpdateReadyList(
SchedBundle &Bndl) {
// The insertion point must have been initialized before any scheduling.
71  assert(ScheduleTopItOpt &&
"Should have been set by now!");
72  auto Where = *ScheduleTopItOpt;
// Mark the node as scheduled so later queries (e.g. getBndlSchedState)
// see it as placed.  (`N` is declared in lines not visible in this chunk.)
80    N->setScheduled(
true);
// Walk the node's DAG predecessors; each one has a successor fewer left
// to schedule now that N is placed.
81  for (
auto *DepN :
N->preds(DAG)) {
// Presumably a predecessor whose count reaches zero becomes ready —
// the enqueue step is in lines not visible here; verify against the file.
85      DepN->decrUnscheduledSuccs();
// Create a SchedBundle that groups the DAG nodes of \p Instrs and register
// it in the Bndls map.  The map owns the bundle; the raw pointer returned
// (return statement not visible in this fragment) stays valid while the
// entry lives in Bndls.
// NOTE(review): fragment — the loop body populating `Nodes` is missing.
92SchedBundle *Scheduler::createBundle(ArrayRef<Instruction *> Instrs) {
// Collect the DAG node for each instruction (body not visible here).
95  for (
auto *
I : Instrs)
// Build the bundle from the gathered nodes; ownership starts here.
97  auto BndlPtr = std::make_unique<SchedBundle>(std::move(Nodes));
98  auto *Bndl = BndlPtr.get();
// Key the owning map by the raw pointer so eraseBundle(SB) can drop it.
99  Bndls[Bndl] = std::move(BndlPtr);
103void Scheduler::eraseBundle(SchedBundle *SB) { Bndls.erase(SB); }
// Keep scheduling ready nodes until all of \p Instrs can be scheduled
// together as one bundle.  Nodes that belong to \p Instrs are deferred
// instead of being scheduled individually; once every one of them is
// ready they are bundled and scheduled back-to-back.
// NOTE(review): fragment — the early-return after scheduling the deferred
// bundle and the failure path are in lines not visible here.
105bool Scheduler::tryScheduleUntil(ArrayRef<Instruction *> Instrs) {
// Set membership test for "is this one of the instructions we must bundle?"
107  DenseSet<Instruction *> InstrsToDefer(Instrs.begin(), Instrs.end());
// Nodes of \p Instrs that became ready but are being held back.
110  SmallVector<DGNode *, 8> DeferredNodes;
115  while (!ReadyList.
empty()) {
116    auto *ReadyN = ReadyList.
pop();
// Hold back nodes that belong to the target bundle rather than
// scheduling them one by one.
117    if (InstrsToDefer.contains(ReadyN->getInstruction())) {
122      DeferredNodes.push_back(ReadyN);
// All target instructions are ready: schedule them as a single bundle.
123      bool ReadyToScheduleDeferred = DeferredNodes.size() == Instrs.size();
124      if (ReadyToScheduleDeferred) {
125        scheduleAndUpdateReadyList(*createBundle(Instrs));
// A non-target ready node is scheduled on its own (singleton bundle).
131    scheduleAndUpdateReadyList(*createBundle({ReadyN->getInstruction()}));
// Reaching here means the ready list drained without all targets ready.
134  assert(DeferredNodes.size() != Instrs.size() &&
135         "We should have succesfully scheduled and early-returned!");
// Classify how much of \p Instrs has already been scheduled:
//   FullyScheduled                      — all in the same existing bundle,
//   PartiallyOrDifferentlyScheduled     — some scheduled, or scheduled in a
//                                         different bundle than the rest,
//   NoneScheduled                       — nothing scheduled yet.
// NOTE(review): fragment — the declaration of `N` / `SB` / `SBV` and several
// branch bodies are in lines not visible here; comments describe only what
// is shown.
139Scheduler::BndlSchedState
140Scheduler::getBndlSchedState(ArrayRef<Instruction *> Instrs)
const {
141  assert(!Instrs.empty() &&
"Expected non-empty bundle");
// Start optimistic for FullyScheduled, pessimistic for PartiallyScheduled,
// and let the scan below flip each flag.
142  bool PartiallyScheduled =
false;
143  bool FullyScheduled =
true;
144  for (
auto *
I : Instrs) {
// A node that exists and is already scheduled marks the bundle as at
// least partially scheduled; a missing/unscheduled node breaks "fully".
146    if (
N !=
nullptr &&
N->scheduled())
147      PartiallyScheduled =
true;
149      FullyScheduled =
false;
151  if (FullyScheduled) {
// Fully scheduled implies each instruction already sits in a bundle.
156    assert(SB !=
nullptr &&
"FullyScheduled assumes that there is an SB!");
// Presumably: if any instruction's node lives in a DIFFERENT bundle
// than SB, this is not the same bundle — verify against the full file.
158    return DAG.
getNode(cast<sandboxir::Instruction>(SBV))
159        ->getSchedBundle() != SB;
161    FullyScheduled =
false;
// Collapse the two flags into the three-way state.
163  return FullyScheduled ? BndlSchedState::FullyScheduled
164         : PartiallyScheduled ? BndlSchedState::PartiallyOrDifferentlyScheduled
165                              : BndlSchedState::NoneScheduled;
// Undo scheduling decisions from the lowest instruction of \p Instrs up to
// (but not past) the current top of schedule, so the region can be
// re-scheduled.
// NOTE(review): fragment — `LowestI`, `N`, and the loop body that erases
// each bundle are in lines not visible here.
168void Scheduler::trimSchedule(ArrayRef<Instruction *> Instrs) {
// Current top of the scheduled region; value() asserts it was set.
169  Instruction *TopI = &*ScheduleTopItOpt.value();
// Walk upwards from LowestI toward TopI; E is one past TopI in that
// direction so TopI itself is included in the walk.
172  for (
auto *
I = LowestI, *
E = TopI->getPrevNode();
I !=
E;
173       I =
I->getPrevNode()) {
// Only instructions that were placed in a bundle need undoing.
175    if (
auto *SB =
N->getSchedBundle())
190 return I->getParent() == (*Instrs.
begin())->getParent();
192 "Instrs not in the same BB!");
193 auto SchedState = getBndlSchedState(Instrs);
194 switch (SchedState) {
195 case BndlSchedState::FullyScheduled:
198 case BndlSchedState::PartiallyOrDifferentlyScheduled:
202 trimSchedule(Instrs);
204 case BndlSchedState::NoneScheduled: {
218 return tryScheduleUntil(Instrs);
226 OS <<
"ReadyList:\n";
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
void reserve(size_type N)
This class implements an extremely fast bulk output stream that can only output to a stream.
Iterator for Instructions in a `BasicBlock`.
BasicBlock * getNodeParent() const
\Returns the parent BB.
A DependencyGraph Node that points to an Instruction and contains memory dependency edges.
SchedBundle * getSchedBundle() const
\Returns the scheduling bundle that this node belongs to, or nullptr.
Instruction * getInstruction() const
DGNode * getNode(Instruction *I) const
Interval< Instruction > extend(ArrayRef< Instruction * > Instrs)
Build/extend the dependency graph such that it includes Instrs.
A sandboxir::User with operands, opcode and linked with previous/next instructions in an instruction ...
BBIterator getIterator() const
\Returns a BasicBlock::iterator for this Instruction.
bool comesBefore(const Instruction *Other) const
Given an instruction Other in the same basic block as this instruction, return true if this instructi...
LLVM_DUMP_METHOD void dump() const
void dump(raw_ostream &OS) const
The nodes that need to be scheduled back-to-back in a single scheduling cycle form a SchedBundle.
DGNode * getBot() const
\Returns the bundle node that comes after the others in program order.
DGNode * getTop() const
\Returns the bundle node that comes before the others in program order.
SmallVector< DGNode *, 4 > ContainerTy
LLVM_DUMP_METHOD void dump() const
void cluster(BasicBlock::iterator Where)
Move all bundle instructions to Where back-to-back.
LLVM_DUMP_METHOD void dump() const
bool trySchedule(ArrayRef< Instruction * > Instrs)
static Instruction * getLowest(ArrayRef< Instruction * > Instrs)
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.