51#define DEBUG_TYPE "regalloc"
55STATISTIC(NumCoalesced,
"Number of copies coalesced");
68class InstrPosIndexes {
70 void unsetInitialized() { IsInitialized =
false; }
72 void init(
const MachineBasicBlock &
MBB) {
74 Instr2PosIndex.
clear();
75 uint64_t LastIndex = 0;
76 for (
const MachineInstr &
MI :
MBB) {
77 LastIndex += InstrDist;
78 Instr2PosIndex[&
MI] = LastIndex;
85 bool getIndex(
const MachineInstr &
MI, uint64_t &Index) {
93 assert(
MI.getParent() == CurMBB &&
"MI is not in CurMBB");
94 auto It = Instr2PosIndex.find(&
MI);
95 if (It != Instr2PosIndex.end()) {
109 unsigned Distance = 1;
111 End = std::next(Start);
112 while (Start != CurMBB->begin() &&
113 !Instr2PosIndex.count(&*std::prev(Start))) {
117 while (End != CurMBB->end() && !Instr2PosIndex.count(&*(End))) {
125 Start == CurMBB->begin() ? 0 : Instr2PosIndex.at(&*std::prev(Start));
127 if (End == CurMBB->end())
128 Step =
static_cast<uint64_t
>(InstrDist);
131 uint64_t EndIndex = Instr2PosIndex.at(&*End);
132 assert(EndIndex > LastIndex &&
"Index must be ascending order");
133 unsigned NumAvailableIndexes = EndIndex - LastIndex - 1;
152 Step = (NumAvailableIndexes + 1) / (Distance + 1);
157 if (
LLVM_UNLIKELY(!Step || (!LastIndex && Step == InstrDist))) {
159 Index = Instr2PosIndex.at(&
MI);
163 for (
auto I = Start;
I != End; ++
I) {
165 Instr2PosIndex[&*
I] = LastIndex;
167 Index = Instr2PosIndex.at(&
MI);
172 bool IsInitialized =
false;
173 enum { InstrDist = 1024 };
174 const MachineBasicBlock *CurMBB =
nullptr;
175 DenseMap<const MachineInstr *, uint64_t> Instr2PosIndex;
178class RegAllocFastImpl {
181 bool ClearVirtRegs_ =
true)
182 : ShouldAllocateRegisterImpl(
F), StackSlotForVirtReg(-1),
183 ClearVirtRegs(ClearVirtRegs_) {}
186 MachineFrameInfo *MFI =
nullptr;
187 MachineRegisterInfo *MRI =
nullptr;
188 const TargetRegisterInfo *TRI =
nullptr;
189 const TargetInstrInfo *TII =
nullptr;
190 RegisterClassInfo RegClassInfo;
194 MachineBasicBlock *MBB =
nullptr;
197 IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;
201 MachineInstr *LastUse =
nullptr;
204 bool LiveOut =
false;
205 bool Reloaded =
false;
208 explicit LiveReg(
Register VirtReg) : VirtReg(VirtReg) {}
209 explicit LiveReg() =
default;
211 unsigned getSparseSetIndex()
const {
return VirtReg.virtRegIndex(); }
214 using LiveRegMap = SparseSet<LiveReg, unsigned, identity, uint16_t>;
217 LiveRegMap LiveVirtRegs;
220 DenseMap<Register, LiveReg> BundleVirtRegsMap;
222 DenseMap<Register, SmallVector<MachineOperand *, 2>> LiveDbgValueMap;
225 DenseMap<Register, SmallVector<MachineInstr *, 1>> DanglingDbgValues;
229 BitVector MayLiveAcrossBlocks;
251 std::vector<unsigned> RegUnitStates;
269 SmallVector<unsigned, 0> UsedInInstr;
271 SmallVector<unsigned, 8> DefOperandIndexes;
276 InstrPosIndexes PosIndexes;
278 void setPhysRegState(MCRegister PhysReg,
unsigned NewState);
279 bool isPhysRegFree(MCRegister PhysReg)
const;
282 void markRegUsedInInstr(
MCPhysReg PhysReg) {
283 for (
MCRegUnit Unit : TRI->regunits(PhysReg))
284 UsedInInstr[
Unit] = InstrGen | 1;
288 bool isClobberedByRegMasks(MCRegister PhysReg)
const {
289 return llvm::any_of(RegMasks, [PhysReg](
const uint32_t *Mask) {
295 bool isRegUsedInInstr(MCRegister PhysReg,
bool LookAtPhysRegUses)
const {
296 if (LookAtPhysRegUses && isClobberedByRegMasks(PhysReg))
298 for (
MCRegUnit Unit : TRI->regunits(PhysReg))
299 if (UsedInInstr[Unit] >= (InstrGen | !LookAtPhysRegUses))
306 void markPhysRegUsedInInstr(MCRegister PhysReg) {
307 for (
MCRegUnit Unit : TRI->regunits(PhysReg)) {
308 assert(UsedInInstr[Unit] <= InstrGen &&
"non-phys use before phys use?");
309 UsedInInstr[
Unit] = InstrGen;
314 void unmarkRegUsedInInstr(MCRegister PhysReg) {
315 for (
MCRegUnit Unit : TRI->regunits(PhysReg))
316 UsedInInstr[
Unit] = 0;
323 spillImpossible = ~0
u
329 bool runOnMachineFunction(MachineFunction &MF);
332 void allocateBasicBlock(MachineBasicBlock &MBB);
337 void findAndSortDefOperandIndexes(
const MachineInstr &
MI);
339 void allocateInstruction(MachineInstr &
MI);
340 void handleDebugValue(MachineInstr &
MI);
341 void handleBundle(MachineInstr &
MI);
343 bool usePhysReg(MachineInstr &
MI, MCRegister PhysReg);
344 bool definePhysReg(MachineInstr &
MI, MCRegister PhysReg);
345 bool displacePhysReg(MachineInstr &
MI, MCRegister PhysReg);
346 void freePhysReg(MCRegister PhysReg);
348 unsigned calcSpillCost(
MCPhysReg PhysReg)
const;
358 void assignVirtToPhysReg(MachineInstr &
MI, LiveReg &, MCRegister PhysReg);
359 void allocVirtReg(MachineInstr &
MI, LiveReg &LR,
Register Hint,
360 bool LookAtPhysRegUses =
false);
361 void allocVirtRegUndef(MachineOperand &MO);
362 void assignDanglingDebugValues(MachineInstr &Def,
Register VirtReg,
364 bool defineLiveThroughVirtReg(MachineInstr &
MI,
unsigned OpNum,
366 bool defineVirtReg(MachineInstr &
MI,
unsigned OpNum,
Register VirtReg,
367 bool LookAtPhysRegUses =
false);
368 bool useVirtReg(MachineInstr &
MI, MachineOperand &MO,
Register VirtReg);
370 MCPhysReg getErrorAssignment(
const LiveReg &LR, MachineInstr &
MI,
371 const TargetRegisterClass &RC);
374 getMBBBeginInsertionPoint(MachineBasicBlock &MBB,
375 SmallSet<Register, 2> &PrologLiveIns)
const;
377 void reloadAtBegin(MachineBasicBlock &MBB);
378 bool setPhysReg(MachineInstr &
MI, MachineOperand &MO,
379 const LiveReg &Assignment);
384 bool shouldAllocateRegister(
const Register Reg)
const;
385 int getStackSpaceFor(
Register VirtReg);
387 MCPhysReg AssignedReg,
bool Kill,
bool LiveOut);
394 bool mayBeSpillFromInlineAsmBr(
const MachineInstr &
MI)
const;
396 void dumpState()
const;
400 RegAllocFastImpl Impl;
406 : MachineFunctionPass(ID), Impl(
F, ClearVirtRegs_) {}
408 bool runOnMachineFunction(MachineFunction &MF)
override {
409 return Impl.runOnMachineFunction(MF);
412 StringRef getPassName()
const override {
return "Fast Register Allocator"; }
414 void getAnalysisUsage(AnalysisUsage &AU)
const override {
419 MachineFunctionProperties getRequiredProperties()
const override {
420 return MachineFunctionProperties().setNoPHIs();
423 MachineFunctionProperties getSetProperties()
const override {
424 if (Impl.ClearVirtRegs) {
425 return MachineFunctionProperties().setNoVRegs();
428 return MachineFunctionProperties();
431 MachineFunctionProperties getClearedProperties()
const override {
432 return MachineFunctionProperties().setIsSSA();
438char RegAllocFast::ID = 0;
445 if (!ShouldAllocateRegisterImpl)
448 return ShouldAllocateRegisterImpl(*
TRI, *
MRI,
Reg);
451void RegAllocFastImpl::setPhysRegState(
MCRegister PhysReg,
unsigned NewState) {
453 RegUnitStates[
Unit] = NewState;
456bool RegAllocFastImpl::isPhysRegFree(MCRegister PhysReg)
const {
458 if (RegUnitStates[Unit] != regFree)
466int RegAllocFastImpl::getStackSpaceFor(
Register VirtReg) {
468 int SS = StackSlotForVirtReg[VirtReg];
474 const TargetRegisterClass &RC = *
MRI->getRegClass(VirtReg);
475 unsigned Size =
TRI->getSpillSize(RC);
476 Align Alignment =
TRI->getSpillAlign(RC);
478 const MachineFunction &MF =
MRI->getMF();
480 Align CurrentAlign =
ST.getFrameLowering()->getStackAlign();
481 if (Alignment > CurrentAlign && !
TRI->canRealignStack(MF))
482 Alignment = CurrentAlign;
487 StackSlotForVirtReg[VirtReg] = FrameIdx;
494 PosIndexes.getIndex(
A, IndexA);
496 PosIndexes.getIndex(
A, IndexA);
497 return IndexA < IndexB;
504bool RegAllocFastImpl::mayBeSpillFromInlineAsmBr(
const MachineInstr &
MI)
const {
506 auto *
MBB =
MI.getParent();
509 for (
const auto &
Op :
MI.operands())
516bool RegAllocFastImpl::mayLiveOut(
Register VirtReg) {
522 const MachineInstr *SelfLoopDef =
nullptr;
528 for (
const MachineInstr &DefInst :
MRI->def_instructions(VirtReg)) {
529 if (DefInst.getParent() !=
MBB) {
533 if (!SelfLoopDef ||
dominates(PosIndexes, DefInst, *SelfLoopDef))
534 SelfLoopDef = &DefInst;
545 static const unsigned Limit = 8;
547 for (
const MachineInstr &UseInst :
MRI->use_nodbg_instructions(VirtReg)) {
548 if (UseInst.getParent() !=
MBB || ++
C >= Limit) {
557 if (SelfLoopDef == &UseInst ||
558 !
dominates(PosIndexes, *SelfLoopDef, UseInst)) {
569bool RegAllocFastImpl::mayLiveIn(
Register VirtReg) {
574 static const unsigned Limit = 8;
576 for (
const MachineInstr &DefInst :
MRI->def_instructions(VirtReg)) {
577 if (DefInst.getParent() !=
MBB || ++
C >= Limit) {
593 int FI = getStackSpaceFor(VirtReg);
596 const TargetRegisterClass &RC = *
MRI->getRegClass(VirtReg);
605 SmallVectorImpl<MachineOperand *> &LRIDbgOperands = LiveDbgValueMap[VirtReg];
606 SmallMapVector<MachineInstr *, SmallVector<const MachineOperand *>, 2>
608 for (MachineOperand *MO : LRIDbgOperands)
609 SpilledOperandsMap[MO->getParent()].push_back(MO);
610 for (
const auto &MISpilledOperands : SpilledOperandsMap) {
611 MachineInstr &DBG = *MISpilledOperands.first;
616 *
MBB, Before, *MISpilledOperands.first, FI, MISpilledOperands.second);
619 LLVM_DEBUG(
dbgs() <<
"Inserting debug info due to spill:\n" << *NewDV);
626 MachineInstr *ClonedDV =
MBB->
getParent()->CloneMachineInstr(NewDV);
628 LLVM_DEBUG(
dbgs() <<
"Cloning debug info due to live out spill\n");
644 LRIDbgOperands.
clear();
652 int FI = getStackSpaceFor(VirtReg);
653 const TargetRegisterClass &RC = *
MRI->getRegClass(VirtReg);
663 MachineBasicBlock &
MBB, SmallSet<Register, 2> &PrologLiveIns)
const {
672 if (!
TII->isBasicBlockPrologue(*
I) && !mayBeSpillFromInlineAsmBr(*
I))
677 for (MachineOperand &MO :
I->operands()) {
689void RegAllocFastImpl::reloadAtBegin(MachineBasicBlock &
MBB) {
690 if (LiveVirtRegs.empty())
693 for (MachineBasicBlock::RegisterMaskPair
P :
MBB.
liveins()) {
694 MCRegister
Reg =
P.PhysReg;
697 setPhysRegState(
Reg, regLiveIn);
700 SmallSet<Register, 2> PrologLiveIns;
705 getMBBBeginInsertionPoint(
MBB, PrologLiveIns);
706 for (
const LiveReg &LR : LiveVirtRegs) {
708 if (PhysReg == 0 || LR.Error)
712 if (RegUnitStates[FirstUnit] == regLiveIn)
716 "no reload in start block. Missing vreg def?");
718 if (PrologLiveIns.
count(PhysReg)) {
722 reload(
MBB.
begin(), LR.VirtReg, PhysReg);
724 reload(InsertBefore, LR.VirtReg, PhysReg);
726 LiveVirtRegs.clear();
732bool RegAllocFastImpl::usePhysReg(MachineInstr &
MI, MCRegister
Reg) {
733 assert(Register::isPhysicalRegister(
Reg) &&
"expected physreg");
734 bool displacedAny = displacePhysReg(
MI,
Reg);
735 setPhysRegState(
Reg, regPreAssigned);
736 markRegUsedInInstr(
Reg);
740bool RegAllocFastImpl::definePhysReg(MachineInstr &
MI, MCRegister
Reg) {
741 bool displacedAny = displacePhysReg(
MI,
Reg);
742 setPhysRegState(
Reg, regPreAssigned);
749bool RegAllocFastImpl::displacePhysReg(MachineInstr &
MI, MCRegister PhysReg) {
750 bool displacedAny =
false;
753 switch (
unsigned VirtReg = RegUnitStates[Unit]) {
755 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
756 assert(LRI != LiveVirtRegs.end() &&
"datastructures in sync");
759 while (mayBeSpillFromInlineAsmBr(*ReloadBefore))
761 reload(ReloadBefore, VirtReg, LRI->PhysReg);
763 setPhysRegState(LRI->PhysReg, regFree);
765 LRI->Reloaded =
true;
770 RegUnitStates[
Unit] = regFree;
780void RegAllocFastImpl::freePhysReg(MCRegister PhysReg) {
784 switch (
unsigned VirtReg = RegUnitStates[FirstUnit]) {
790 setPhysRegState(PhysReg, regFree);
793 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
794 assert(LRI != LiveVirtRegs.end());
796 setPhysRegState(LRI->PhysReg, regFree);
807unsigned RegAllocFastImpl::calcSpillCost(
MCPhysReg PhysReg)
const {
809 switch (
unsigned VirtReg = RegUnitStates[Unit]) {
815 return spillImpossible;
817 bool SureSpill = StackSlotForVirtReg[VirtReg] != -1 ||
818 findLiveVirtReg(VirtReg)->LiveOut;
819 return SureSpill ? spillClean : spillDirty;
826void RegAllocFastImpl::assignDanglingDebugValues(MachineInstr &Definition,
829 auto UDBGValIter = DanglingDbgValues.
find(VirtReg);
830 if (UDBGValIter == DanglingDbgValues.
end())
833 SmallVectorImpl<MachineInstr *> &Dangling = UDBGValIter->second;
834 for (MachineInstr *DbgValue : Dangling) {
835 assert(DbgValue->isDebugValue());
836 if (!DbgValue->hasDebugOperandForReg(VirtReg))
843 E = DbgValue->getIterator();
845 if (
I->modifiesRegister(
Reg,
TRI) || --Limit == 0) {
852 for (MachineOperand &MO : DbgValue->getDebugOperandsForReg(VirtReg)) {
864void RegAllocFastImpl::assignVirtToPhysReg(MachineInstr &AtMI, LiveReg &LR,
865 MCRegister PhysReg) {
869 assert(LR.PhysReg == 0 &&
"Already assigned a physreg");
870 assert(PhysReg != 0 &&
"Trying to assign no register");
871 LR.PhysReg = PhysReg;
872 setPhysRegState(PhysReg, VirtReg.
id());
874 assignDanglingDebugValues(AtMI, VirtReg, PhysReg);
880 static const unsigned ChainLengthLimit = 3;
887 MachineInstr *VRegDef =
MRI->getUniqueVRegDef(
Reg);
891 }
while (++
C <= ChainLengthLimit);
899 static const unsigned DefLimit = 3;
901 for (
const MachineInstr &
MI :
MRI->def_instructions(VirtReg)) {
904 Reg = traceCopyChain(
Reg);
916void RegAllocFastImpl::allocVirtReg(MachineInstr &
MI, LiveReg &LR,
917 Register Hint0,
bool LookAtPhysRegUses) {
918 const Register VirtReg = LR.VirtReg;
921 const TargetRegisterClass &RC = *
MRI->getRegClass(VirtReg);
923 <<
" in class " <<
TRI->getRegClassName(&RC)
928 !isRegUsedInInstr(Hint0, LookAtPhysRegUses)) {
930 if (isPhysRegFree(Hint0)) {
933 assignVirtToPhysReg(
MI, LR, Hint0);
944 Register Hint1 = traceCopies(VirtReg);
946 !isRegUsedInInstr(Hint1, LookAtPhysRegUses)) {
948 if (isPhysRegFree(Hint1)) {
951 assignVirtToPhysReg(
MI, LR, Hint1);
962 unsigned BestCost = spillImpossible;
964 for (
MCPhysReg PhysReg : AllocationOrder) {
966 if (isRegUsedInInstr(PhysReg, LookAtPhysRegUses)) {
971 unsigned Cost = calcSpillCost(PhysReg);
975 assignVirtToPhysReg(
MI, LR, PhysReg);
979 if (PhysReg == Hint0 || PhysReg == Hint1)
980 Cost -= spillPrefBonus;
982 if (
Cost < BestCost) {
991 LR.PhysReg = getErrorAssignment(LR,
MI, RC);
996 displacePhysReg(
MI, BestReg);
997 assignVirtToPhysReg(
MI, LR, BestReg);
1000void RegAllocFastImpl::allocVirtRegUndef(MachineOperand &MO) {
1004 if (!shouldAllocateRegister(VirtReg))
1007 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
1009 bool IsRenamable =
true;
1010 if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
1011 PhysReg = LRI->PhysReg;
1013 const TargetRegisterClass &RC = *
MRI->getRegClass(VirtReg);
1015 if (AllocationOrder.
empty()) {
1021 PhysReg = getErrorAssignment(*LRI, *MO.
getParent(), RC);
1023 IsRenamable =
false;
1025 PhysReg = AllocationOrder.
front();
1029 if (SubRegIdx != 0) {
1030 PhysReg =
TRI->getSubReg(PhysReg, SubRegIdx);
1040bool RegAllocFastImpl::defineLiveThroughVirtReg(MachineInstr &
MI,
1043 if (!shouldAllocateRegister(VirtReg))
1045 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
1046 if (LRI != LiveVirtRegs.end()) {
1048 if (PrevReg != 0 && isRegUsedInInstr(PrevReg,
true)) {
1050 <<
" (tied/earlyclobber resolution)\n");
1051 freePhysReg(PrevReg);
1053 allocVirtReg(
MI, *LRI, 0,
true);
1059 TII->get(TargetOpcode::COPY), PrevReg)
1062 MachineOperand &MO =
MI.getOperand(OpNum);
1067 return defineVirtReg(
MI, OpNum, VirtReg,
true);
1077bool RegAllocFastImpl::defineVirtReg(MachineInstr &
MI,
unsigned OpNum,
1078 Register VirtReg,
bool LookAtPhysRegUses) {
1080 if (!shouldAllocateRegister(VirtReg))
1082 MachineOperand &MO =
MI.getOperand(OpNum);
1083 LiveRegMap::iterator LRI;
1085 std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
1088 if (mayLiveOut(VirtReg)) {
1089 LRI->LiveOut =
true;
1096 if (LRI->PhysReg == 0) {
1097 allocVirtReg(
MI, *LRI, 0, LookAtPhysRegUses);
1099 assert((!isRegUsedInInstr(LRI->PhysReg, LookAtPhysRegUses) || LRI->Error) &&
1100 "TODO: preassign mismatch");
1102 <<
" use existing assignment to "
1107 if (LRI->Reloaded || LRI->LiveOut) {
1108 if (!
MI.isImplicitDef()) {
1112 <<
" RL: " << LRI->Reloaded <<
'\n');
1113 bool Kill = LRI->LastUse ==
nullptr;
1114 spill(SpillBefore, VirtReg, PhysReg, Kill, LRI->LiveOut);
1118 if (
MI.getOpcode() == TargetOpcode::INLINEASM_BR) {
1119 int FI = StackSlotForVirtReg[VirtReg];
1120 const TargetRegisterClass &RC = *
MRI->getRegClass(VirtReg);
1121 for (MachineOperand &MO :
MI.operands()) {
1123 MachineBasicBlock *Succ = MO.
getMBB();
1132 LRI->LastUse =
nullptr;
1134 LRI->LiveOut =
false;
1135 LRI->Reloaded =
false;
1137 if (
MI.getOpcode() == TargetOpcode::BUNDLE) {
1138 BundleVirtRegsMap[VirtReg] = *LRI;
1140 markRegUsedInInstr(PhysReg);
1141 return setPhysReg(
MI, MO, *LRI);
1146bool RegAllocFastImpl::useVirtReg(MachineInstr &
MI, MachineOperand &MO,
1149 if (!shouldAllocateRegister(VirtReg))
1151 LiveRegMap::iterator LRI;
1153 std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
1156 if (mayLiveOut(VirtReg)) {
1157 LRI->LiveOut =
true;
1164 assert((!MO.
isKill() || LRI->LastUse == &
MI) &&
"Invalid kill flag");
1168 if (LRI->PhysReg == 0) {
1171 if (
MI.isCopy() &&
MI.getOperand(1).getSubReg() == 0) {
1172 Hint =
MI.getOperand(0).getReg();
1173 if (
Hint.isVirtual()) {
1174 assert(!shouldAllocateRegister(Hint));
1178 "Copy destination should already be assigned");
1181 allocVirtReg(
MI, *LRI, Hint,
false);
1186 if (
MI.getOpcode() == TargetOpcode::BUNDLE) {
1187 BundleVirtRegsMap[VirtReg] = *LRI;
1189 markRegUsedInInstr(LRI->PhysReg);
1190 return setPhysReg(
MI, MO, *LRI);
1196MCPhysReg RegAllocFastImpl::getErrorAssignment(
const LiveReg &LR,
1198 const TargetRegisterClass &RC) {
1199 MachineFunction &MF = *
MI.getMF();
1210 if (AllocationOrder.
empty()) {
1214 "no registers from class available to allocate", Fn,
1219 assert(!RawRegs.
empty() &&
"register classes cannot have no registers");
1220 return RawRegs.
front();
1223 if (!LR.Error && EmitError) {
1226 if (
MI.isInlineAsm()) {
1227 MI.emitInlineAsmError(
1228 "inline assembly requires more registers than available");
1232 "ran out of registers during register allocation", Fn,
1237 return AllocationOrder.
front();
1242bool RegAllocFastImpl::setPhysReg(MachineInstr &
MI, MachineOperand &MO,
1243 const LiveReg &Assignment) {
1245 assert(PhysReg &&
"assignments should always be to a valid physreg");
1273 MI.addRegisterKilled(PhysReg,
TRI,
true);
1282 MI.addRegisterDead(PhysReg,
TRI,
true);
1284 MI.addRegisterDefined(PhysReg,
TRI);
1293void RegAllocFastImpl::dumpState()
const {
1294 for (
unsigned Unit = 1, UnitE =
TRI->getNumRegUnits(); Unit != UnitE;
1296 switch (
unsigned VirtReg = RegUnitStates[Unit]) {
1299 case regPreAssigned:
1306 LiveRegMap::const_iterator
I = findLiveVirtReg(VirtReg);
1307 assert(
I != LiveVirtRegs.end() &&
"have LiveVirtRegs entry");
1308 if (
I->LiveOut ||
I->Reloaded) {
1316 assert(
TRI->hasRegUnit(
I->PhysReg, Unit) &&
"inverse mapping present");
1323 for (
const LiveReg &LR : LiveVirtRegs) {
1328 assert(Register::isPhysicalRegister(PhysReg) &&
"mapped to physreg");
1330 assert(RegUnitStates[Unit] == VirtReg &&
"inverse map valid");
1338void RegAllocFastImpl::addRegClassDefCounts(
1340 assert(RegClassDefCounts.
size() ==
TRI->getNumRegClasses());
1343 if (!shouldAllocateRegister(
Reg))
1345 const TargetRegisterClass *OpRC =
MRI->getRegClass(
Reg);
1346 for (
unsigned RCIdx = 0, RCIdxEnd =
TRI->getNumRegClasses();
1347 RCIdx != RCIdxEnd; ++RCIdx) {
1348 const TargetRegisterClass *IdxRC =
TRI->getRegClass(RCIdx);
1351 ++RegClassDefCounts[RCIdx];
1357 for (
unsigned RCIdx = 0, RCIdxEnd =
TRI->getNumRegClasses();
1358 RCIdx != RCIdxEnd; ++RCIdx) {
1359 const TargetRegisterClass *IdxRC =
TRI->getRegClass(RCIdx);
1360 for (MCRegAliasIterator Alias(
Reg,
TRI,
true); Alias.isValid(); ++Alias) {
1362 ++RegClassDefCounts[RCIdx];
1372void RegAllocFastImpl::findAndSortDefOperandIndexes(
const MachineInstr &
MI) {
1373 DefOperandIndexes.
clear();
1376 for (
unsigned I = 0,
E =
MI.getNumOperands();
I <
E; ++
I) {
1377 const MachineOperand &MO =
MI.getOperand(
I);
1384 markPhysRegUsedInInstr(
Reg);
1394 if (DefOperandIndexes.
size() <= 1)
1402 SmallVector<unsigned> RegClassDefCounts(
TRI->getNumRegClasses(), 0);
1404 for (
const MachineOperand &MO :
MI.all_defs())
1405 addRegClassDefCounts(RegClassDefCounts, MO.
getReg());
1407 llvm::sort(DefOperandIndexes, [&](
unsigned I0,
unsigned I1) {
1408 const MachineOperand &MO0 =
MI.getOperand(I0);
1409 const MachineOperand &MO1 =
MI.getOperand(I1);
1412 const TargetRegisterClass &RC0 = *
MRI->getRegClass(Reg0);
1413 const TargetRegisterClass &RC1 = *
MRI->getRegClass(Reg1);
1417 unsigned ClassSize0 = RegClassInfo.
getOrder(&RC0).size();
1418 unsigned ClassSize1 = RegClassInfo.
getOrder(&RC1).size();
1420 bool SmallClass0 = ClassSize0 < RegClassDefCounts[RC0.
getID()];
1421 bool SmallClass1 = ClassSize1 < RegClassDefCounts[RC1.
getID()];
1422 if (SmallClass0 > SmallClass1)
1424 if (SmallClass0 < SmallClass1)
1432 if (Livethrough0 > Livethrough1)
1434 if (Livethrough0 < Livethrough1)
1448 unsigned TiedIdx =
MI.findTiedOperandIdx(
MI.getOperandNo(&MO));
1453void RegAllocFastImpl::allocateInstruction(MachineInstr &
MI) {
1473 BundleVirtRegsMap.
clear();
1476 bool HasPhysRegUse =
false;
1477 bool HasRegMask =
false;
1478 bool HasVRegDef =
false;
1479 bool HasDef =
false;
1480 bool HasEarlyClobber =
false;
1481 bool NeedToAssignLiveThroughs =
false;
1482 for (MachineOperand &MO :
MI.operands()) {
1486 if (!shouldAllocateRegister(
Reg))
1492 HasEarlyClobber =
true;
1493 NeedToAssignLiveThroughs =
true;
1496 NeedToAssignLiveThroughs =
true;
1499 if (!
MRI->isReserved(
Reg)) {
1502 bool displacedAny = definePhysReg(
MI,
Reg);
1504 HasEarlyClobber =
true;
1509 HasPhysRegUse =
true;
1523 bool ReArrangedImplicitOps =
true;
1531 if (NeedToAssignLiveThroughs) {
1532 while (ReArrangedImplicitOps) {
1533 ReArrangedImplicitOps =
false;
1534 findAndSortDefOperandIndexes(
MI);
1535 for (
unsigned OpIdx : DefOperandIndexes) {
1536 MachineOperand &MO =
MI.getOperand(
OpIdx);
1541 ReArrangedImplicitOps = defineLiveThroughVirtReg(
MI,
OpIdx,
Reg);
1543 ReArrangedImplicitOps = defineVirtReg(
MI,
OpIdx,
Reg);
1547 if (ReArrangedImplicitOps)
1553 while (ReArrangedImplicitOps) {
1554 ReArrangedImplicitOps =
false;
1555 for (MachineOperand &MO :
MI.all_defs()) {
1558 ReArrangedImplicitOps =
1559 defineVirtReg(
MI,
MI.getOperandNo(&MO),
Reg);
1560 if (ReArrangedImplicitOps)
1571 for (MachineOperand &MO :
reverse(
MI.all_defs())) {
1582 "tied def assigned to clobbered register");
1594 if (
MRI->isReserved(
Reg))
1597 unmarkRegUsedInInstr(
Reg);
1605 for (
const auto *RM : RegMasks)
1606 MRI->addPhysRegsUsedFromRegMask(RM);
1609 for (
const LiveReg &LR : LiveVirtRegs) {
1611 if (PhysReg != 0 && isClobberedByRegMasks(PhysReg))
1612 displacePhysReg(
MI, PhysReg);
1617 if (HasPhysRegUse) {
1618 for (MachineOperand &MO :
MI.operands()) {
1624 if (
MRI->isReserved(
Reg))
1626 if (!usePhysReg(
MI,
Reg))
1634 bool HasUndefUse =
false;
1635 bool ReArrangedImplicitMOs =
true;
1636 while (ReArrangedImplicitMOs) {
1637 ReArrangedImplicitMOs =
false;
1638 for (MachineOperand &MO :
MI.operands()) {
1656 ReArrangedImplicitMOs = useVirtReg(
MI, MO,
Reg);
1657 if (ReArrangedImplicitMOs)
1666 for (MachineOperand &MO :
MI.all_uses()) {
1671 assert(MO.
isUndef() &&
"Should only have undef virtreg uses left");
1672 allocVirtRegUndef(MO);
1677 if (HasEarlyClobber) {
1678 for (MachineOperand &MO :
reverse(
MI.all_defs())) {
1681 assert(!MO.
getSubReg() &&
"should be already handled in def processing");
1706 if (
MI.isCopy() &&
MI.getOperand(0).getReg() ==
MI.getOperand(1).getReg() &&
1707 MI.getNumOperands() == 2) {
1713void RegAllocFastImpl::handleDebugValue(MachineInstr &
MI) {
1716 assert(
MI.isDebugValue() &&
"not a DBG_VALUE*");
1717 for (
const auto &MO :
MI.debug_operands()) {
1723 if (!shouldAllocateRegister(
Reg))
1727 int SS = StackSlotForVirtReg[
Reg];
1737 LiveRegMap::iterator LRI = findLiveVirtReg(
Reg);
1741 if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
1743 for (
auto &RegMO : DbgOps)
1744 setPhysReg(
MI, *RegMO, *LRI);
1746 DanglingDbgValues[
Reg].push_back(&
MI);
1751 LiveDbgValueMap[
Reg].append(DbgOps.begin(), DbgOps.end());
1755void RegAllocFastImpl::handleBundle(MachineInstr &
MI) {
1758 while (BundledMI->isBundledWithPred()) {
1759 for (MachineOperand &MO : BundledMI->operands()) {
1767 DenseMap<Register, LiveReg>::iterator DI = BundleVirtRegsMap.
find(
Reg);
1768 assert(DI != BundleVirtRegsMap.
end() &&
"Unassigned virtual register");
1770 setPhysReg(
MI, MO, DI->second);
1777void RegAllocFastImpl::allocateBasicBlock(MachineBasicBlock &
MBB) {
1781 PosIndexes.unsetInitialized();
1782 RegUnitStates.assign(
TRI->getNumRegUnits(), regFree);
1783 assert(LiveVirtRegs.empty() &&
"Mapping not cleared from last block?");
1786 setPhysRegState(LiveReg.PhysReg, regPreAssigned);
1796 if (
MI.isDebugValue()) {
1797 handleDebugValue(
MI);
1801 allocateInstruction(
MI);
1805 if (
MI.getOpcode() == TargetOpcode::BUNDLE) {
1813 LLVM_DEBUG(
dbgs() <<
"Loading live registers at begin of block.\n");
1818 for (MachineInstr *
MI : Coalesced)
1820 NumCoalesced += Coalesced.size();
1822 for (
auto &UDBGPair : DanglingDbgValues) {
1823 for (MachineInstr *DbgValue : UDBGPair.second) {
1828 LLVM_DEBUG(
dbgs() <<
"Register did not survive for " << *DbgValue
1833 DanglingDbgValues.clear();
1838bool RegAllocFastImpl::runOnMachineFunction(MachineFunction &MF) {
1839 LLVM_DEBUG(
dbgs() <<
"********** FAST REGISTER ALLOCATION **********\n"
1840 <<
"********** Function: " << MF.
getName() <<
'\n');
1846 MRI->freezeReservedRegs();
1848 unsigned NumRegUnits =
TRI->getNumRegUnits();
1850 UsedInInstr.
assign(NumRegUnits, 0);
1854 unsigned NumVirtRegs =
MRI->getNumVirtRegs();
1855 StackSlotForVirtReg.
resize(NumVirtRegs);
1856 LiveVirtRegs.setUniverse(NumVirtRegs);
1857 MayLiveAcrossBlocks.
clear();
1858 MayLiveAcrossBlocks.
resize(NumVirtRegs);
1861 for (MachineBasicBlock &
MBB : MF)
1862 allocateBasicBlock(
MBB);
1864 if (ClearVirtRegs) {
1867 MRI->clearVirtRegs();
1870 StackSlotForVirtReg.
clear();
1871 LiveDbgValueMap.
clear();
1878 RegAllocFastImpl Impl(Opts.Filter, Opts.ClearVRegs);
1879 bool Changed = Impl.runOnMachineFunction(MF);
1889 bool PrintFilterName = Opts.FilterName !=
"all";
1890 bool PrintNoClearVRegs = !Opts.ClearVRegs;
1891 bool PrintSemicolon = PrintFilterName && PrintNoClearVRegs;
1893 OS <<
"regallocfast";
1894 if (PrintFilterName || PrintNoClearVRegs) {
1896 if (PrintFilterName)
1897 OS <<
"filter=" << Opts.FilterName;
1900 if (PrintNoClearVRegs)
1901 OS <<
"no-clear-vregs";
1909 bool ClearVirtRegs) {
1910 return new RegAllocFast(Ftor, ClearVirtRegs);
unsigned const MachineRegisterInfo * MRI
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
#define LLVM_UNLIKELY(EXPR)
This file defines the DenseMap class.
const HexagonInstrInfo * TII
This file implements an indexed map.
Register const TargetRegisterInfo * TRI
This file implements a map that provides insertion order iteration.
Promote Memory to Register
MachineInstr unsigned OpIdx
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
static bool isCoalescable(const MachineInstr &MI)
static cl::opt< bool > IgnoreMissingDefs("rafast-ignore-missing-defs", cl::Hidden)
static bool dominates(InstrPosIndexes &PosIndexes, const MachineInstr &A, const MachineInstr &B)
static bool isTiedToNotUndef(const MachineOperand &MO)
static RegisterRegAlloc fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator)
This file defines the SmallSet class.
This file defines the SmallVector class.
This file defines the SparseSet class derived from the version described in Briggs,...
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
LLVM_ABI void setPreservesCFG()
This function should be called by the pass, iff they do not:
const T & front() const
front - Get the first element.
size_t size() const
size - Get the array size.
bool empty() const
empty - Check if the array is empty.
bool test(unsigned Idx) const
void resize(unsigned N, bool t=false)
resize - Grow or shrink the bitvector.
void clear()
clear - Removes all bits from the bitvector.
Represents analyses that only rely on functions' control flow.
iterator find(const_arg_type_t< KeyT > Val)
FunctionPass class - This class is used to implement most global optimizations.
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
void storeRegToStackSlot(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, Register SrcReg, bool isKill, int FrameIndex, const TargetRegisterClass *RC, Register VReg, MachineInstr::MIFlag Flags=MachineInstr::NoFlags) const override
Store the specified register of the given register class to the specified stack frame index.
void loadRegFromStackSlot(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, Register DestReg, int FrameIndex, const TargetRegisterClass *RC, Register VReg, MachineInstr::MIFlag Flags=MachineInstr::NoFlags) const override
Load the specified register of the given register class from the specified stack frame index.
Register isStoreToStackSlot(const MachineInstr &MI, int &FrameIndex) const override
If the specified machine instruction is a direct store to a stack slot, return the virtual or physica...
void resize(typename StorageT::size_type S)
LLVM_ABI void diagnose(const DiagnosticInfo &DI)
Report a message to the currently installed diagnostic handler.
Wrapper class representing physical registers. Should be passed by value.
An RAII based helper class to modify MachineFunctionProperties when running pass.
bool isInlineAsmBrIndirectTarget() const
Returns true if this is the indirect dest of an INLINEASM_BR.
iterator_range< liveout_iterator > liveouts() const
MachineInstrBundleIterator< const MachineInstr > const_iterator
LLVM_ABI instr_iterator insert(instr_iterator I, MachineInstr *M)
Insert MI into the instruction list before I, possibly inside a bundle.
iterator_range< livein_iterator > liveins() const
LLVM_ABI iterator getFirstTerminator()
Returns an iterator to the first terminator instruction of this basic block.
LLVM_ABI void dump() const
Instructions::iterator instr_iterator
void addLiveIn(MCRegister PhysReg, LaneBitmask LaneMask=LaneBitmask::getAll())
Adds the specified register as a live in.
const MachineFunction * getParent() const
Return the MachineFunction containing this basic block.
LLVM_ABI instr_iterator erase(instr_iterator I)
Remove an instruction from the instruction list and delete it.
LLVM_ABI bool isSuccessor(const MachineBasicBlock *MBB) const
Return true if the specified MBB is a successor of this block.
MachineInstrBundleIterator< MachineInstr > iterator
LLVM_ABI bool isLiveIn(MCRegister Reg, LaneBitmask LaneMask=LaneBitmask::getAll()) const
Return true if the specified register is in the live in set.
LLVM_ABI int CreateSpillStackObject(uint64_t Size, Align Alignment)
Create a new statically sized stack object that represents a spill slot, returning a nonnegative iden...
bool isSpillSlotObjectIndex(int ObjectIdx) const
Returns true if the specified index corresponds to a spill slot.
MachineFunctionPass - This class adapts the FunctionPass interface to allow convenient creation of pa...
void getAnalysisUsage(AnalysisUsage &AU) const override
getAnalysisUsage - Subclasses that override getAnalysisUsage must call this.
const TargetSubtargetInfo & getSubtarget() const
getSubtarget - Return the subtarget for which this machine code is being compiled.
StringRef getName() const
getName - Return the name of the corresponding LLVM function.
MachineFrameInfo & getFrameInfo()
getFrameInfo - Return the frame info object for the current function.
MachineRegisterInfo & getRegInfo()
getRegInfo - Return information about the registers currently in use.
Function & getFunction()
Return the LLVM function that this machine code represents.
const MachineFunctionProperties & getProperties() const
Get the function properties.
const MachineBasicBlock & front() const
const MachineInstrBuilder & addReg(Register RegNo, unsigned flags=0, unsigned SubReg=0) const
Add a new virtual register operand.
Representation of each machine instruction.
bool hasDebugOperandForReg(Register Reg) const
Returns whether this debug value has at least one debug operand with the register Reg.
bool isDebugValueList() const
void setDebugValueUndef()
Sets all register debug operands in this debug value instruction to be undef.
const MachineBasicBlock * getParent() const
bool isNonListDebugValue() const
bool isDebugValue() const
MachineOperand & getDebugOperand(unsigned Index)
const MachineOperand & getOperand(unsigned i) const
MachineOperand class - Representation of each machine instruction operand.
void setSubReg(unsigned subReg)
unsigned getSubReg() const
bool readsReg() const
readsReg - Returns true if this operand reads the previous value of its register.
LLVM_ABI void setIsRenamable(bool Val=true)
bool isReg() const
isReg - Tests if this is a MO_Register operand.
bool isRegMask() const
isRegMask - Tests if this is a MO_RegisterMask operand.
MachineBasicBlock * getMBB() const
void setIsDead(bool Val=true)
LLVM_ABI void setReg(Register Reg)
Change the register this operand corresponds to.
void setIsKill(bool Val=true)
MachineInstr * getParent()
getParent - Return the instruction that this operand belongs to.
void setIsUndef(bool Val=true)
bool isEarlyClobber() const
Register getReg() const
getReg - Returns the register number.
bool isInternalRead() const
static bool clobbersPhysReg(const uint32_t *RegMask, MCRegister PhysReg)
clobbersPhysReg - Returns true if this RegMask clobbers PhysReg.
const uint32_t * getRegMask() const
getRegMask - Returns a bit mask of registers preserved by this RegMask operand.
bool isMBB() const
isMBB - Tests if this is a MO_MachineBasicBlock operand.
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
PreservedAnalyses run(MachineFunction &MF, MachineFunctionAnalysisManager &)
void printPipeline(raw_ostream &OS, function_ref< StringRef(StringRef)> MapClassName2PassName)
LLVM_ABI void runOnMachineFunction(const MachineFunction &MF, bool Rev=false)
runOnFunction - Prepare to answer questions about MF.
ArrayRef< MCPhysReg > getOrder(const TargetRegisterClass *RC) const
getOrder - Returns the preferred allocation order for RC.
Wrapper class representing virtual and physical registers.
unsigned virtRegIndex() const
Convert a virtual register number to a 0-based index.
constexpr bool isValid() const
constexpr bool isVirtual() const
Return true if the specified register number is in the virtual register namespace.
constexpr unsigned id() const
constexpr bool isPhysical() const
Return true if the specified register number is in the physical register namespace.
size_type count(const T &V) const
count - Return 1 if the element is in the set, 0 otherwise.
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
void assign(size_type NumElts, ValueParamT Elt)
void push_back(const T &Elt)
typename DenseT::const_iterator const_iterator
typename DenseT::iterator iterator
StringRef - Represent a constant reference to a string, i.e.
ArrayRef< MCPhysReg > getRegisters() const
unsigned getID() const
Return the register class ID number.
bool contains(Register Reg) const
Return true if the specified register is included in this register class.
bool hasSubClassEq(const TargetRegisterClass *RC) const
Returns true if RC is a sub-class of or equal to this class.
virtual const TargetInstrInfo * getInstrInfo() const
virtual const TargetRegisterInfo * getRegisterInfo() const =0
Return the target's register information.
An efficient, type-erasing, non-owning reference to a callable.
self_iterator getIterator()
This class implements an extremely fast bulk output stream that can only output to a stream.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Align[]
Key for Kernel::Arg::Metadata::mAlign.
@ C
The default llvm calling convention, compatible with C.
@ Kill
The last use of a register.
initializer< Ty > init(const Ty &Val)
This is an optimization pass for GlobalISel generic memory operations.
LLVM_ABI FunctionPass * createFastRegisterAllocator()
FastRegisterAllocation Pass - This pass register allocates as fast as possible.
std::function< bool(const TargetRegisterInfo &TRI, const MachineRegisterInfo &MRI, const Register Reg)> RegAllocFilterFunc
Filter function for register classes during regalloc.
MachineInstrBuilder BuildMI(MachineFunction &MF, const MIMetadata &MIMD, const MCInstrDesc &MCID)
Builder interface. Specify how to create the initial instruction itself.
LLVM_ABI void updateDbgValueForSpill(MachineInstr &Orig, int FrameIndex, Register Reg)
Update a DBG_VALUE whose value has been spilled to FrameIndex.
LLVM_ABI Printable printRegUnit(MCRegUnit Unit, const TargetRegisterInfo *TRI)
Create Printable object to print register units on a raw_ostream.
AnalysisManager< MachineFunction > MachineFunctionAnalysisManager
LLVM_ABI PreservedAnalyses getMachineFunctionPassPreservedAnalyses()
Returns the minimum set of Analyses that all machine function passes must preserve.
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
auto reverse(ContainerTy &&C)
void sort(IteratorTy Start, IteratorTy End)
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
unsigned MCRegUnit
Register units are used to compute register aliasing.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference sizeof(SmallVector&lt;T, 0&gt;).
MutableArrayRef(T &OneElt) -> MutableArrayRef< T >
uint16_t MCPhysReg
An unsigned integer type large enough to represent all physical registers, but not necessarily virtua...
DWARFExpression::Operation Op
ArrayRef(const T &OneElt) -> ArrayRef< T >
LLVM_ABI MachineInstr * buildDbgValueForSpill(MachineBasicBlock &BB, MachineBasicBlock::iterator I, const MachineInstr &Orig, int FrameIndex, Register SpillReg)
Clone a DBG_VALUE whose value has been spilled to FrameIndex.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
LLVM_ABI Printable printReg(Register Reg, const TargetRegisterInfo *TRI=nullptr, unsigned SubIdx=0, const MachineRegisterInfo *MRI=nullptr)
Prints virtual and physical registers with or without a TRI instance.