#define DEBUG_TYPE "regalloc"

STATISTIC(NumCoalesced, "Number of copies coalesced");

class InstrPosIndexes {
  void unsetInitialized() { IsInitialized = false; }
    Instr2PosIndex.clear();
    LastIndex += InstrDist;
    Instr2PosIndex[&MI] = LastIndex;

    assert(MI.getParent() == CurMBB && "MI is not in CurMBB");
    auto It = Instr2PosIndex.find(&MI);
    if (It != Instr2PosIndex.end()) {

    unsigned Distance = 1;
    End = std::next(Start);
    while (Start != CurMBB->begin() &&
           !Instr2PosIndex.count(&*std::prev(Start))) {
    while (End != CurMBB->end() && !Instr2PosIndex.count(&*End)) {

        Start == CurMBB->begin() ? 0 : Instr2PosIndex.at(&*std::prev(Start));
    if (End == CurMBB->end())
      Step = static_cast<uint64_t>(InstrDist);
      assert(EndIndex > LastIndex && "Index must be ascending order");
      unsigned NumAvailableIndexes = EndIndex - LastIndex - 1;
      Step = (NumAvailableIndexes + 1) / (Distance + 1);

    if (LLVM_UNLIKELY(!Step || (!LastIndex && Step == InstrDist))) {
      Index = Instr2PosIndex.at(&MI);
    for (auto I = Start; I != End; ++I) {
      Instr2PosIndex[&*I] = LastIndex;
    Index = Instr2PosIndex.at(&MI);

  bool IsInitialized = false;
  enum { InstrDist = 1024 };
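// Editor's sketch (not part of the LLVM listing above): the InstrPosIndexes
// fragments implement lazily renumbered instruction positions. Numbered
// instructions are spaced InstrDist (1024) apart; when previously unnumbered
// instructions are queried, the free indexes between the two nearest numbered
// neighbours are spread evenly, and only when no gap remains is the whole
// block renumbered. The helper below illustrates just that step computation
// under those assumptions; the name pickStep is hypothetical.
#include <cassert>
#include <cstdint>

static uint64_t pickStep(uint64_t LastIndex, uint64_t EndIndex,
                         unsigned Distance, bool AtBlockEnd,
                         uint64_t InstrDist = 1024) {
  // Past the last numbered instruction the usual spacing can be kept.
  if (AtBlockEnd)
    return InstrDist;
  // Otherwise spread the Distance new instructions evenly into the gap between
  // the two already-numbered neighbours; a result of 0 means the gap is
  // exhausted and the caller must renumber the whole block.
  assert(EndIndex > LastIndex && "Index must be ascending order");
  uint64_t NumAvailableIndexes = EndIndex - LastIndex - 1;
  return (NumAvailableIndexes + 1) / (Distance + 1);
}
// Example: with neighbours at indexes 1024 and 2048 and 3 new instructions,
// pickStep(1024, 2048, 3, false) yields 256, so they get 1280, 1536 and 1792.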
class RegAllocFastImpl {
                   bool ClearVirtRegs_ = true)
      : ShouldAllocateRegisterImpl(F), StackSlotForVirtReg(-1),
        ClearVirtRegs(ClearVirtRegs_) {}

    bool LiveOut = false;
    bool Reloaded = false;
    explicit LiveReg(Register VirtReg) : VirtReg(VirtReg) {}
    unsigned getSparseSetIndex() const { return VirtReg.virtRegIndex(); }

  LiveRegMap LiveVirtRegs;
  std::vector<unsigned> RegUnitStates;
  InstrPosIndexes PosIndexes;

  void setPhysRegState(MCRegister PhysReg, unsigned NewState);
  bool isPhysRegFree(MCPhysReg PhysReg) const;

  void markRegUsedInInstr(MCPhysReg PhysReg) {
      UsedInInstr[Unit] = InstrGen | 1;

  bool isClobberedByRegMasks(MCPhysReg PhysReg) const {

  bool isRegUsedInInstr(MCPhysReg PhysReg, bool LookAtPhysRegUses) const {
    if (LookAtPhysRegUses && isClobberedByRegMasks(PhysReg))
      if (UsedInInstr[Unit] >= (InstrGen | !LookAtPhysRegUses))

  void markPhysRegUsedInInstr(MCPhysReg PhysReg) {
      assert(UsedInInstr[Unit] <= InstrGen && "non-phys use before phys use?");
      UsedInInstr[Unit] = InstrGen;

  void unmarkRegUsedInInstr(MCPhysReg PhysReg) {
      UsedInInstr[Unit] = 0;
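// Editor's sketch (not part of the LLVM listing): UsedInInstr is a per-regunit
// generation counter rather than a bitset cleared per instruction. Ordinary
// uses store InstrGen | 1 and plain physreg uses store InstrGen, so a single
// comparison answers "was this unit used in the current instruction?", with
// LookAtPhysRegUses deciding whether plain physreg uses count. A standalone
// model of that encoding; the class name is hypothetical, and the full source
// also handles wraparound of the generation counter, which is omitted here.
#include <cstdint>
#include <vector>

class UsedInInstrModel {
  std::vector<uint32_t> UsedInInstr; // one slot per register unit
  uint32_t InstrGen = 0;

public:
  explicit UsedInInstrModel(unsigned NumUnits) : UsedInInstr(NumUnits, 0) {}

  // Called once per instruction; the low bit is reserved for the use kind.
  void nextInstruction() { InstrGen += 2; }

  void markRegUsed(unsigned Unit) { UsedInInstr[Unit] = InstrGen | 1; }
  void markPhysRegUsed(unsigned Unit) { UsedInInstr[Unit] = InstrGen; }

  bool isUsed(unsigned Unit, bool LookAtPhysRegUses) const {
    // With LookAtPhysRegUses the threshold is InstrGen (physreg uses count);
    // otherwise it is InstrGen | 1, so a physreg-only use is ignored.
    return UsedInInstr[Unit] >= (InstrGen | !LookAtPhysRegUses);
  }
};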
    spillImpossible = ~0u

  unsigned calcSpillCost(MCPhysReg PhysReg) const;
                    bool LookAtPhysRegUses = false);
                    bool LookAtPhysRegUses = false);

  bool shouldAllocateRegister(const Register Reg) const;
  int getStackSpaceFor(Register VirtReg);
             MCPhysReg AssignedReg, bool Kill, bool LiveOut);

  void dumpState() const;

  RegAllocFastImpl Impl;
    return Impl.runOnMachineFunction(MF);
        MachineFunctionProperties::Property::NoPHIs);
    if (Impl.ClearVirtRegs) {
          MachineFunctionProperties::Property::NoVRegs);
        MachineFunctionProperties::Property::IsSSA);
char RegAllocFast::ID = 0;

  if (!ShouldAllocateRegisterImpl)
  return ShouldAllocateRegisterImpl(*TRI, *MRI, Reg);

void RegAllocFastImpl::setPhysRegState(MCRegister PhysReg, unsigned NewState) {
    RegUnitStates[Unit] = NewState;

bool RegAllocFastImpl::isPhysRegFree(MCPhysReg PhysReg) const {
    if (RegUnitStates[Unit] != regFree)

int RegAllocFastImpl::getStackSpaceFor(Register VirtReg) {
  int SS = StackSlotForVirtReg[VirtReg];
  unsigned Size = TRI->getSpillSize(RC);
  Align Alignment = TRI->getSpillAlign(RC);
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  PosIndexes.getIndex(A, IndexA);
  // In the full source the second query follows a getIndex(B, IndexB) call
  // that may renumber the block, so A's index is refreshed here; it is not a
  // stray duplicate.
  PosIndexes.getIndex(A, IndexA);
  return IndexA < IndexB;
bool RegAllocFastImpl::mayLiveOut(Register VirtReg) {
    if (DefInst.getParent() != MBB) {
      if (!SelfLoopDef || dominates(PosIndexes, DefInst, *SelfLoopDef))
        SelfLoopDef = &DefInst;

  static const unsigned Limit = 8;
  for (const MachineInstr &UseInst : MRI->use_nodbg_instructions(VirtReg)) {
    if (UseInst.getParent() != MBB || ++C >= Limit) {
      if (SelfLoopDef == &UseInst ||
          !dominates(PosIndexes, *SelfLoopDef, UseInst)) {

bool RegAllocFastImpl::mayLiveIn(Register VirtReg) {
  static const unsigned Limit = 8;
    if (DefInst.getParent() != MBB || ++C >= Limit) {

  int FI = getStackSpaceFor(VirtReg);
      SpilledOperandsMap[MO->getParent()].push_back(MO);
    for (const auto &MISpilledOperands : SpilledOperandsMap) {
          *MBB, Before, *MISpilledOperands.first, FI, MISpilledOperands.second);
      LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:\n" << *NewDV);

    LLVM_DEBUG(dbgs() << "Cloning debug info due to live out spill\n");
    LRIDbgOperands.clear();

  int FI = getStackSpaceFor(VirtReg);

    if (!TII->isBasicBlockPrologue(*I))

  if (LiveVirtRegs.empty())
    setPhysRegState(Reg, regLiveIn);

      getMBBBeginInsertionPoint(MBB, PrologLiveIns);
  for (const LiveReg &LR : LiveVirtRegs) {
    if (PhysReg == 0 || LR.Error)
    if (RegUnitStates[FirstUnit] == regLiveIn)
           "no reload in start block. Missing vreg def?");
    if (PrologLiveIns.count(PhysReg)) {
      reload(MBB.begin(), LR.VirtReg, PhysReg);
      reload(InsertBefore, LR.VirtReg, PhysReg);
  LiveVirtRegs.clear();
  bool displacedAny = displacePhysReg(MI, Reg);
  setPhysRegState(Reg, regPreAssigned);
  markRegUsedInInstr(Reg);

  bool displacedAny = displacePhysReg(MI, Reg);
  setPhysRegState(Reg, regPreAssigned);

  bool displacedAny = false;
    switch (unsigned VirtReg = RegUnitStates[Unit]) {
      LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
      assert(LRI != LiveVirtRegs.end() && "datastructures in sync");
      reload(ReloadBefore, VirtReg, LRI->PhysReg);
      setPhysRegState(LRI->PhysReg, regFree);
      LRI->Reloaded = true;
      RegUnitStates[Unit] = regFree;

void RegAllocFastImpl::freePhysReg(MCPhysReg PhysReg) {
  switch (unsigned VirtReg = RegUnitStates[FirstUnit]) {
    setPhysRegState(PhysReg, regFree);
    LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
    assert(LRI != LiveVirtRegs.end());
    setPhysRegState(LRI->PhysReg, regFree);

unsigned RegAllocFastImpl::calcSpillCost(MCPhysReg PhysReg) const {
    switch (unsigned VirtReg = RegUnitStates[Unit]) {
      return spillImpossible;
      bool SureSpill = StackSlotForVirtReg[VirtReg] != -1 ||
                       findLiveVirtReg(VirtReg)->LiveOut;
      return SureSpill ? spillClean : spillDirty;
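// Editor's sketch (not part of the LLVM listing): the costs computed above
// drive the candidate scan in allocVirtReg below. Every allocatable physreg is
// priced with calcSpillCost, copy hints receive a spillPrefBonus discount, and
// the cheapest candidate is displaced. The enum values and the helper
// pickCheapest are assumptions for illustration, not quoted from the listing.
#include <cstddef>
#include <vector>

enum : unsigned {
  spillClean = 50,       // assumed: vreg already has an up-to-date spill slot
  spillDirty = 100,      // assumed: spilling would need a fresh store
  spillPrefBonus = 20,   // assumed: discount for hinted (copy-related) registers
  spillImpossible = ~0u  // from the listing: the register cannot be displaced
};

// Hypothetical mirror of the BestCost loop in allocVirtReg.
static std::size_t pickCheapest(const std::vector<unsigned> &Costs,
                                std::size_t HintIdx) {
  unsigned BestCost = spillImpossible;
  std::size_t BestIdx = Costs.size(); // "no candidate" sentinel
  for (std::size_t Idx = 0; Idx != Costs.size(); ++Idx) {
    unsigned Cost = Costs[Idx];
    if (Cost == spillImpossible)
      continue;
    if (Idx == HintIdx && Cost >= spillPrefBonus)
      Cost -= spillPrefBonus;
    if (Cost < BestCost) {
      BestCost = Cost;
      BestIdx = Idx;
    }
  }
  return BestIdx;
}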
void RegAllocFastImpl::assignDanglingDebugValues(MachineInstr &Definition,

  auto UDBGValIter = DanglingDbgValues.find(VirtReg);
  if (UDBGValIter == DanglingDbgValues.end())
    if (!DbgValue->hasDebugOperandForReg(VirtReg))
      if (I->modifiesRegister(Reg, TRI) || --Limit == 0) {

void RegAllocFastImpl::assignVirtToPhysReg(MachineInstr &AtMI, LiveReg &LR,

  assert(LR.PhysReg == 0 && "Already assigned a physreg");
  assert(PhysReg != 0 && "Trying to assign no register");
  LR.PhysReg = PhysReg;
  setPhysRegState(PhysReg, VirtReg);
  assignDanglingDebugValues(AtMI, VirtReg, PhysReg);

  static const unsigned ChainLengthLimit = 3;
    if (Reg.isPhysical())
  } while (++C <= ChainLengthLimit);

  static const unsigned DefLimit = 3;
      Reg = traceCopyChain(Reg);

                                    Register Hint0, bool LookAtPhysRegUses) {
  const Register VirtReg = LR.VirtReg;
                    << " in class " << TRI->getRegClassName(&RC)

      !isRegUsedInInstr(Hint0, LookAtPhysRegUses)) {
    if (isPhysRegFree(Hint0)) {
      assignVirtToPhysReg(MI, LR, Hint0);

  Register Hint1 = traceCopies(VirtReg);
      !isRegUsedInInstr(Hint1, LookAtPhysRegUses)) {
    if (isPhysRegFree(Hint1)) {
      assignVirtToPhysReg(MI, LR, Hint1);

  unsigned BestCost = spillImpossible;
    if (isRegUsedInInstr(PhysReg, LookAtPhysRegUses)) {
    unsigned Cost = calcSpillCost(PhysReg);
      assignVirtToPhysReg(MI, LR, PhysReg);
    if (PhysReg == Hint0 || PhysReg == Hint1)
      Cost -= spillPrefBonus;
    if (Cost < BestCost) {

    LR.PhysReg = getErrorAssignment(LR, MI, RC);

  displacePhysReg(MI, BestReg);
  assignVirtToPhysReg(MI, LR, BestReg);
  if (!shouldAllocateRegister(VirtReg))
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
    PhysReg = LRI->PhysReg;
    PhysReg = getErrorAssignment(*LRI, *MO.getParent(), RC);

  if (SubRegIdx != 0) {
    PhysReg = TRI->getSubReg(PhysReg, SubRegIdx);

  if (!shouldAllocateRegister(VirtReg))
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end()) {
    if (PrevReg != 0 && isRegUsedInInstr(PrevReg, true)) {
                        << " (tied/earlyclobber resolution)\n");
      freePhysReg(PrevReg);
      allocVirtReg(MI, *LRI, 0, true);
                TII->get(TargetOpcode::COPY), PrevReg)
  return defineVirtReg(MI, OpNum, VirtReg, true);

bool RegAllocFastImpl::defineVirtReg(MachineInstr &MI, unsigned OpNum,
                                     Register VirtReg, bool LookAtPhysRegUses) {
  if (!shouldAllocateRegister(VirtReg))
  LiveRegMap::iterator LRI;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
    if (mayLiveOut(VirtReg)) {
      LRI->LiveOut = true;
  if (LRI->PhysReg == 0) {
    allocVirtReg(MI, *LRI, 0, LookAtPhysRegUses);
    assert((!isRegUsedInInstr(LRI->PhysReg, LookAtPhysRegUses) || LRI->Error) &&
           "TODO: preassign mismatch");
                      << " use existing assignment to "

  if (LRI->Reloaded || LRI->LiveOut) {
    if (!MI.isImplicitDef()) {
                        << " RL: " << LRI->Reloaded << '\n');
      bool Kill = LRI->LastUse == nullptr;
      spill(SpillBefore, VirtReg, PhysReg, Kill, LRI->LiveOut);
      if (MI.getOpcode() == TargetOpcode::INLINEASM_BR) {
        int FI = StackSlotForVirtReg[VirtReg];
    LRI->LastUse = nullptr;
  LRI->LiveOut = false;
  LRI->Reloaded = false;
  if (MI.getOpcode() == TargetOpcode::BUNDLE) {
    BundleVirtRegsMap[VirtReg] = PhysReg;
  markRegUsedInInstr(PhysReg);
  return setPhysReg(MI, MO, PhysReg);

  if (!shouldAllocateRegister(VirtReg))
  LiveRegMap::iterator LRI;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
    if (mayLiveOut(VirtReg)) {
      LRI->LiveOut = true;
    assert((!MO.isKill() || LRI->LastUse == &MI) && "Invalid kill flag");
  if (LRI->PhysReg == 0) {
    if (MI.isCopy() && MI.getOperand(1).getSubReg() == 0) {
      Hint = MI.getOperand(0).getReg();
        assert(!shouldAllocateRegister(Hint));
               "Copy destination should already be assigned");
    allocVirtReg(MI, *LRI, Hint, false);
  if (MI.getOpcode() == TargetOpcode::BUNDLE) {
    BundleVirtRegsMap[VirtReg] = LRI->PhysReg;
  markRegUsedInInstr(LRI->PhysReg);
  return setPhysReg(MI, MO, LRI->PhysReg);
MCPhysReg RegAllocFastImpl::getErrorAssignment(const LiveReg &LR,

        MachineFunctionProperties::Property::FailedRegAlloc);
    MF.getProperties().set(MachineFunctionProperties::Property::FailedRegAlloc);
        "no registers from class available to allocate", Fn,
    assert(!RawRegs.empty() && "register classes cannot have no registers");
    return RawRegs.front();

  if (!LR.Error && EmitError) {
    if (MI.isInlineAsm()) {
      MI.emitInlineAsmError(
          "inline assembly requires more registers than available");
          "ran out of registers during register allocation", Fn,

      MI.addRegisterKilled(PhysReg, TRI, true);
      MI.addRegisterDead(PhysReg, TRI, true);
      MI.addRegisterDefined(PhysReg, TRI);

void RegAllocFastImpl::dumpState() const {
  for (unsigned Unit = 1, UnitE = TRI->getNumRegUnits(); Unit != UnitE;
    switch (unsigned VirtReg = RegUnitStates[Unit]) {
    case regPreAssigned:
      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "have LiveVirtRegs entry");
      if (I->LiveOut || I->Reloaded) {
      assert(TRI->hasRegUnit(I->PhysReg, Unit) && "inverse mapping present");
  for (const LiveReg &LR : LiveVirtRegs) {
      assert(RegUnitStates[Unit] == VirtReg && "inverse map valid");

void RegAllocFastImpl::addRegClassDefCounts(
  assert(RegClassDefCounts.size() == TRI->getNumRegClasses());
  if (Reg.isVirtual()) {
    if (!shouldAllocateRegister(Reg))
    for (unsigned RCIdx = 0, RCIdxEnd = TRI->getNumRegClasses();
         RCIdx != RCIdxEnd; ++RCIdx) {
        ++RegClassDefCounts[RCIdx];
  for (unsigned RCIdx = 0, RCIdxEnd = TRI->getNumRegClasses();
       RCIdx != RCIdxEnd; ++RCIdx) {
      ++RegClassDefCounts[RCIdx];
void RegAllocFastImpl::findAndSortDefOperandIndexes(const MachineInstr &MI) {
  DefOperandIndexes.clear();

  for (unsigned I = 0, E = MI.getNumOperands(); I < E; ++I) {
    if (Reg.isPhysical()) {
        markPhysRegUsedInInstr(Reg);
    if (MO.isDef() && Reg.isVirtual() && shouldAllocateRegister(Reg))

  if (DefOperandIndexes.size() <= 1)
      addRegClassDefCounts(RegClassDefCounts, MO.getReg());

  llvm::sort(DefOperandIndexes, [&](unsigned I0, unsigned I1) {
    unsigned ClassSize0 = RegClassInfo.getOrder(&RC0).size();
    unsigned ClassSize1 = RegClassInfo.getOrder(&RC1).size();
    bool SmallClass0 = ClassSize0 < RegClassDefCounts[RC0.getID()];
    bool SmallClass1 = ClassSize1 < RegClassDefCounts[RC1.getID()];
    if (SmallClass0 > SmallClass1)
    if (SmallClass0 < SmallClass1)
    if (Livethrough0 > Livethrough1)
    if (Livethrough0 < Livethrough1)

  unsigned TiedIdx = MI.findTiedOperandIdx(MI.getOperandNo(&MO));

  BundleVirtRegsMap.clear();

  bool HasPhysRegUse = false;
  bool HasRegMask = false;
  bool HasVRegDef = false;
  bool HasDef = false;
  bool HasEarlyClobber = false;
  bool NeedToAssignLiveThroughs = false;
    if (Reg.isVirtual()) {
      if (!shouldAllocateRegister(Reg))
          HasEarlyClobber = true;
          NeedToAssignLiveThroughs = true;
          NeedToAssignLiveThroughs = true;
    } else if (Reg.isPhysical()) {
      if (!MRI->isReserved(Reg)) {
          bool displacedAny = definePhysReg(MI, Reg);
            HasEarlyClobber = true;
          HasPhysRegUse = true;

  bool ReArrangedImplicitOps = true;
  if (NeedToAssignLiveThroughs) {
    while (ReArrangedImplicitOps) {
      ReArrangedImplicitOps = false;
      findAndSortDefOperandIndexes(MI);
      for (unsigned OpIdx : DefOperandIndexes) {
          ReArrangedImplicitOps = defineLiveThroughVirtReg(MI, OpIdx, Reg);
          ReArrangedImplicitOps = defineVirtReg(MI, OpIdx, Reg);
        if (ReArrangedImplicitOps)
    while (ReArrangedImplicitOps) {
      ReArrangedImplicitOps = false;
        if (Reg.isVirtual()) {
          ReArrangedImplicitOps = defineVirtReg(MI, MI.getOperandNo(&MO), Reg);
          if (ReArrangedImplicitOps)

         "tied def assigned to clobbered register");
    if (Reg.isVirtual()) {
      assert(!shouldAllocateRegister(Reg));
    if (MRI->isReserved(Reg))
    unmarkRegUsedInInstr(Reg);
    for (const auto *RM : RegMasks)
      MRI->addPhysRegsUsedFromRegMask(RM);
    for (const LiveReg &LR : LiveVirtRegs) {
      if (PhysReg != 0 && isClobberedByRegMasks(PhysReg))
        displacePhysReg(MI, PhysReg);

  if (HasPhysRegUse) {
      if (!Reg.isPhysical())
      if (MRI->isReserved(Reg))
      if (!usePhysReg(MI, Reg))

  bool HasUndefUse = false;
  bool ReArrangedImplicitMOs = true;
  while (ReArrangedImplicitMOs) {
    ReArrangedImplicitMOs = false;
      if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
      ReArrangedImplicitMOs = useVirtReg(MI, MO, Reg);
      if (ReArrangedImplicitMOs)

      if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
      assert(MO.isUndef() && "Should only have undef virtreg uses left");
      allocVirtRegUndef(MO);

  if (HasEarlyClobber) {
      assert(!MO.getSubReg() && "should be already handled in def processing");
      if (Reg.isVirtual()) {
        assert(!shouldAllocateRegister(Reg));
      assert(Reg.isPhysical() && "should have register assigned");
      if (MI.readsRegister(Reg, TRI))

  if (MI.isCopy() && MI.getOperand(0).getReg() == MI.getOperand(1).getReg() &&
      MI.getNumOperands() == 2) {

  assert(MI.isDebugValue() && "not a DBG_VALUE*");
  for (const auto &MO : MI.debug_operands()) {
    if (!Reg.isVirtual())
    if (!shouldAllocateRegister(Reg))
      int SS = StackSlotForVirtReg[Reg];

  LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
  if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
    for (auto &RegMO : DbgOps)
      setPhysReg(MI, *RegMO, LRI->PhysReg);
    DanglingDbgValues[Reg].push_back(&MI);

  LiveDbgValueMap[Reg].append(DbgOps.begin(), DbgOps.end());

  while (BundledMI->isBundledWithPred()) {
      if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
      DI = BundleVirtRegsMap.find(Reg);
      assert(DI != BundleVirtRegsMap.end() && "Unassigned virtual register");
      setPhysReg(MI, MO, DI->second);
  PosIndexes.unsetInitialized();
  RegUnitStates.assign(TRI->getNumRegUnits(), regFree);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");
    setPhysRegState(LiveReg.PhysReg, regPreAssigned);

    if (MI.isDebugValue()) {
      handleDebugValue(MI);
    allocateInstruction(MI);
    if (MI.getOpcode() == TargetOpcode::BUNDLE) {

  LLVM_DEBUG(dbgs() << "Loading live registers at begin of block.\n");
  NumCoalesced += Coalesced.size();

  for (auto &UDBGPair : DanglingDbgValues) {
      if (!DbgValue->hasDebugOperandForReg(UDBGPair.first))
  DanglingDbgValues.clear();

  LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
                    << "********** Function: " << MF.getName() << '\n');
  MRI->freezeReservedRegs();

  unsigned NumRegUnits = TRI->getNumRegUnits();
  UsedInInstr.assign(NumRegUnits, 0);

  unsigned NumVirtRegs = MRI->getNumVirtRegs();
  StackSlotForVirtReg.resize(NumVirtRegs);
  LiveVirtRegs.setUniverse(NumVirtRegs);
  MayLiveAcrossBlocks.clear();
  MayLiveAcrossBlocks.resize(NumVirtRegs);
    allocateBasicBlock(MBB);

  if (ClearVirtRegs) {
    MRI->clearVirtRegs();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();

  bool Changed = Impl.runOnMachineFunction(MF);

  bool PrintFilterName = Opts.FilterName != "all";
  bool PrintSemicolon = PrintFilterName && PrintNoClearVRegs;
  OS << "regallocfast";
  if (PrintFilterName || PrintNoClearVRegs) {
    if (PrintFilterName)
    if (PrintNoClearVRegs)
      OS << "no-clear-vregs";

                                      bool ClearVirtRegs) {
  return new RegAllocFast(Ftor, ClearVirtRegs);