#ifndef LLVM_MCA_HARDWAREUNITS_LSUNIT_H
#define LLVM_MCA_HARDWAREUNITS_LSUNIT_H
unsigned UsedLQEntries;
unsigned UsedSQEntries;
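// Number of load and store queue entries currently in use. These counters are
// checked against the queue sizes to decide whether another memory operation
// can be accepted.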
LSUnitBase(const MCSchedModel &SM, unsigned LoadQueueSize,
           unsigned StoreQueueSize, bool AssumeNoAlias);
bool isSQFull() const { return SQSize && SQSize == UsedSQEntries; }
bool isLQFull() const { return LQSize && LQSize == UsedLQEntries; }
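// Note that a queue size of zero is treated as an unbounded queue: in that
// case the corresponding predicate above never reports the queue as full.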
unsigned NumPredecessors = 0;
unsigned NumExecutingPredecessors = 0;
unsigned NumExecutedPredecessors = 0;

unsigned NumInstructions = 0;
unsigned NumExecuting = 0;
unsigned NumExecuted = 0;

InstRef CriticalMemoryInstruction;
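// The predecessor counters track how many predecessor groups exist and how
// many of them are currently executing or already executed; the instruction
// counters track the same information for the instructions that belong to
// this group. CriticalMemoryInstruction caches the instruction in this group
// with the largest number of cycles left to execute.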
MemoryGroup(const MemoryGroup &) = delete;
MemoryGroup &operator=(const MemoryGroup &) = delete;
size_t getNumSuccessors() const { return OrderSucc.size() + DataSucc.size(); }
unsigned getNumExecutingPredecessors() const { return NumExecutingPredecessors; }
unsigned getNumExecutedPredecessors() const { return NumExecutedPredecessors; }
const InstRef &getCriticalMemoryInstruction() const { return CriticalMemoryInstruction; }
const CriticalDependency &getCriticalPredecessor() const { return CriticalPredecessor; }
Group->NumPredecessors++;
if (isExecuting())
  Group->onGroupIssued(CriticalMemoryInstruction, IsDataDependent);

if (IsDataDependent)
  DataSucc.emplace_back(Group);
else
  OrderSucc.emplace_back(Group);
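// Successors are filed into DataSucc or OrderSucc depending on whether the
// new group actually consumes data produced here or merely has to preserve
// memory ordering with respect to this group.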
bool isWaiting() const {
  return NumPredecessors >
         (NumExecutingPredecessors + NumExecutedPredecessors);
}
bool isPending() const {
  return NumExecutingPredecessors &&
         ((NumExecutedPredecessors + NumExecutingPredecessors) ==
          NumPredecessors);
}
bool isReady() const { return NumExecutedPredecessors == NumPredecessors; }
bool isExecuting() const {
  return NumExecuting && (NumExecuting == (NumInstructions - NumExecuted));
}
bool isExecuted() const { return NumInstructions == NumExecuted; }
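// Read together, these predicates describe a group's lifecycle: waiting (at
// least one predecessor has not started yet), pending (every predecessor has
// at least started and some are still executing), ready (all predecessors
// executed), executing (all remaining instructions of the group are in
// flight), and executed (every instruction of the group has completed).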
NumExecutingPredecessors++;

if (!ShouldUpdateCriticalDep)
  return;

unsigned Cycles = IR.getInstruction()->getCyclesLeft();
if (CriticalPredecessor.Cycles < Cycles) {
  CriticalPredecessor.IID = IR.getSourceIndex();
  CriticalPredecessor.Cycles = Cycles;
}
NumExecutingPredecessors--;
NumExecutedPredecessors++;
const Instruction &IS = *IR.getInstruction();
if ((bool)CriticalMemoryInstruction) {
  const Instruction &OtherIS = *CriticalMemoryInstruction.getInstruction();
  if (OtherIS.getCyclesLeft() < IS.getCyclesLeft())
    CriticalMemoryInstruction = IR;
} else {
  CriticalMemoryInstruction = IR;
}
for (MemoryGroup *MG : OrderSucc) {
  MG->onGroupIssued(CriticalMemoryInstruction, false);
  MG->onGroupExecuted();
}

for (MemoryGroup *MG : DataSucc)
  MG->onGroupIssued(CriticalMemoryInstruction, true);
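// Order-only successors are notified and released in the same step, because a
// pure ordering dependency is satisfied as soon as this group starts
// executing. Data-dependent successors are only notified here; they are
// released later, in onInstructionExecuted, once the data is available.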
if (CriticalMemoryInstruction &&
    CriticalMemoryInstruction.getSourceIndex() == IR.getSourceIndex()) {
  CriticalMemoryInstruction.invalidate();
}

for (MemoryGroup *MG : DataSucc)
  MG->onGroupExecuted();
if (isWaiting() && CriticalPredecessor.Cycles)
  CriticalPredecessor.Cycles--;
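// The remaining latency of the critical predecessor is only counted down
// while this group is still waiting on it.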
bool isWaiting(const InstRef &IR) const override {
  unsigned GroupID = IR.getInstruction()->getLSUTokenID();
  return getGroup(GroupID).isWaiting();
}
bool isPending(const InstRef &IR) const override {
  unsigned GroupID = IR.getInstruction()->getLSUTokenID();
  return getGroup(GroupID).isPending();
}
bool isReady(const InstRef &IR) const override {
  unsigned GroupID = IR.getInstruction()->getLSUTokenID();
  return getGroup(GroupID).isReady();
}
bool hasDependentUsers(const InstRef &IR) const override {
  const MemoryGroup &Group = getGroup(IR.getInstruction()->getLSUTokenID());
  return !Group.isExecuted() && Group.getNumSuccessors();
}
unsigned dispatch(const InstRef &IR) override;
void onInstructionIssued(const InstRef &IR) override {
  unsigned GroupID = IR.getInstruction()->getLSUTokenID();
  Groups[GroupID]->onInstructionIssued(IR);
}
void onInstructionRetired(const InstRef &IR) override;
void onInstructionExecuted(const InstRef &IR) override;
void cycleEvent() override;
void dump() const override;
bool isValidGroupID(unsigned Index) const {
  return Index && Groups.contains(Index);
}

const MemoryGroup &getGroup(unsigned Index) const {
  assert(isValidGroupID(Index) && "Group doesn't exist!");
  return *Groups.find(Index)->second;
}

MemoryGroup &getGroup(unsigned Index) {
  assert(isValidGroupID(Index) && "Group doesn't exist!");
  return *Groups.find(Index)->second;
}
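// Index 0 never names a live group, so a token value of zero effectively
// acts as a "no group" sentinel.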
unsigned createMemoryGroup() {
  Groups.insert(std::make_pair(NextGroupID, std::make_unique<MemoryGroup>()));
  return NextGroupID++;
}
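// Illustrative use only (the callers are not shown in this excerpt): a
// dispatcher would typically do something like
//   unsigned GroupID = createMemoryGroup();
//   MemoryGroup &Group = getGroup(GroupID);
// Group IDs are handed out monotonically from NextGroupID, so every group is
// keyed by a distinct value, and that value stays non-zero as long as
// NextGroupID is initialized to a non-zero value.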