#define DEBUG_TYPE "regalloc"

STATISTIC(NumCoalesced, "Number of copies coalesced");

class InstrPosIndexes {
  void unsetInitialized() { IsInitialized = false; }
    Instr2PosIndex.clear();
      LastIndex += InstrDist;
      Instr2PosIndex[&MI] = LastIndex;
    assert(MI.getParent() == CurMBB && "MI is not in CurMBB");
    auto It = Instr2PosIndex.find(&MI);
    if (It != Instr2PosIndex.end()) {
    unsigned Distance = 1;
        End = std::next(Start);
    while (Start != CurMBB->begin() &&
           !Instr2PosIndex.count(&*std::prev(Start))) {
    while (End != CurMBB->end() && !Instr2PosIndex.count(&*(End))) {
        Start == CurMBB->begin() ? 0 : Instr2PosIndex.at(&*std::prev(Start));
    if (End == CurMBB->end())
      Step = static_cast<uint64_t>(InstrDist);
      assert(EndIndex > LastIndex && "Index must be ascending order");
      unsigned NumAvailableIndexes = EndIndex - LastIndex - 1;
      Step = (NumAvailableIndexes + 1) / (Distance + 1);
    if (LLVM_UNLIKELY(!Step || (!LastIndex && Step == InstrDist))) {
      Index = Instr2PosIndex.at(&MI);
    for (auto I = Start; I != End; ++I) {
      Instr2PosIndex[&*I] = LastIndex;
    Index = Instr2PosIndex.at(&MI);
  bool IsInitialized = false;
  enum { InstrDist = 1024 };
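  // A reading of the scheme above: instruction positions in the current block
  // are handed out InstrDist (1024) apart, so an instruction inserted later
  // (a spill or reload) can usually be given an index strictly between its
  // neighbours without touching any other entry.  When the gap between the
  // nearest already-indexed instructions is used up, the unindexed range in
  // between is renumbered with
  //   Step = (NumAvailableIndexes + 1) / (Distance + 1)
  // and only when even that degenerates (Step == 0, or the renumbering would
  // have to start from the block head at full stride) does getIndex fall back
  // to rebuilding the whole map -- which is presumably why dominates() below
  // re-queries the first index after asking for the second one.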
class RegAllocFastImpl {
                   bool ClearVirtRegs_ = true)
      : ShouldAllocateRegisterImpl(F), StackSlotForVirtReg(-1),
        ClearVirtRegs(ClearVirtRegs_) {}
    bool LiveOut = false;
    bool Reloaded = false;
    explicit LiveReg(Register VirtReg) : VirtReg(VirtReg) {}
    unsigned getSparseSetIndex() const {
  LiveRegMap LiveVirtRegs;
  std::vector<unsigned> RegUnitStates;
  InstrPosIndexes PosIndexes;
  void setPhysRegState(MCPhysReg PhysReg, unsigned NewState);
  bool isPhysRegFree(MCPhysReg PhysReg) const;
  void markRegUsedInInstr(MCPhysReg PhysReg) {
      UsedInInstr[Unit] = InstrGen | 1;
  bool isClobberedByRegMasks(MCPhysReg PhysReg) const {
  bool isRegUsedInInstr(MCPhysReg PhysReg, bool LookAtPhysRegUses) const {
    if (LookAtPhysRegUses && isClobberedByRegMasks(PhysReg))
      if (UsedInInstr[Unit] >= (InstrGen | !LookAtPhysRegUses))
  void markPhysRegUsedInInstr(MCPhysReg PhysReg) {
      assert(UsedInInstr[Unit] <= InstrGen && "non-phys use before phys use?");
      UsedInInstr[Unit] = InstrGen;
  void unmarkRegUsedInInstr(MCPhysReg PhysReg) {
      UsedInInstr[Unit] = 0;
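  // How the generation tags above combine (InstrGen is presumably advanced
  // once per instruction with bit 0 kept clear, so the low bit is free as a
  // "virtual use" flag; the bump itself is not part of this excerpt):
  //   markRegUsedInInstr(R)      -> UsedInInstr[Unit] = InstrGen | 1
  //   markPhysRegUsedInInstr(R)  -> UsedInInstr[Unit] = InstrGen
  //   unmarkRegUsedInInstr(R)    -> UsedInInstr[Unit] = 0
  //   isRegUsedInInstr(R, false) -> UsedInInstr[Unit] >= (InstrGen | 1)
  //   isRegUsedInInstr(R, true)  -> UsedInInstr[Unit] >= InstrGen, plus the
  //                                 regmask-clobber check
  // so a query that does not care about plain physreg uses skips units that
  // were only marked by markPhysRegUsedInInstr, and the array never needs to
  // be cleared between instructions.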
    spillImpossible = ~0u
  unsigned calcSpillCost(MCPhysReg PhysReg) const;
                    bool LookAtPhysRegUses = false);
                               bool LookAtPhysRegUses = false);
  bool shouldAllocateRegister(const Register Reg) const;
  int getStackSpaceFor(Register VirtReg);
             MCPhysReg AssignedReg, bool Kill, bool LiveOut);
  void dumpState() const;
  RegAllocFastImpl Impl;
    return Impl.runOnMachineFunction(MF);
        MachineFunctionProperties::Property::NoPHIs);
    if (Impl.ClearVirtRegs) {
          MachineFunctionProperties::Property::NoVRegs);
        MachineFunctionProperties::Property::IsSSA);

char RegAllocFast::ID = 0;
  if (!ShouldAllocateRegisterImpl)
  return ShouldAllocateRegisterImpl(*TRI, *MRI, Reg);

void RegAllocFastImpl::setPhysRegState(MCPhysReg PhysReg, unsigned NewState) {
    RegUnitStates[Unit] = NewState;

bool RegAllocFastImpl::isPhysRegFree(MCPhysReg PhysReg) const {
    if (RegUnitStates[Unit] != regFree)

int RegAllocFastImpl::getStackSpaceFor(Register VirtReg) {
  int SS = StackSlotForVirtReg[VirtReg];
  unsigned Size = TRI->getSpillSize(RC);
  Align Alignment = TRI->getSpillAlign(RC);
  StackSlotForVirtReg[VirtReg] = FrameIdx;

  PosIndexes.getIndex(A, IndexA);
    PosIndexes.getIndex(A, IndexA);
  return IndexA < IndexB;
bool RegAllocFastImpl::mayLiveOut(Register VirtReg) {
    if (DefInst.getParent() != MBB) {
      if (!SelfLoopDef || dominates(PosIndexes, DefInst, *SelfLoopDef))
        SelfLoopDef = &DefInst;
  static const unsigned Limit = 8;
  for (const MachineInstr &UseInst : MRI->use_nodbg_instructions(VirtReg)) {
    if (UseInst.getParent() != MBB || ++C >= Limit) {
      if (SelfLoopDef == &UseInst ||
          !dominates(PosIndexes, *SelfLoopDef, UseInst)) {

bool RegAllocFastImpl::mayLiveIn(Register VirtReg) {
  static const unsigned Limit = 8;
    if (DefInst.getParent() != MBB || ++C >= Limit) {
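// mayLiveOut and mayLiveIn are deliberately cheap, conservative checks: they
// look at no more than Limit (8) non-debug uses respectively defs of VirtReg,
// and the moment one of them sits outside the current block -- or the limit
// is exceeded -- the register is treated as possibly live across blocks.  The
// SelfLoopDef handling above additionally tolerates values that are only
// redefined across a back edge of a single-block loop.  The positive answers
// appear to be cached in the MayLiveAcrossBlocks bit vector that
// runOnMachineFunction resizes near the end of this file; the caching code
// itself is not part of this excerpt.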
  int FI = getStackSpaceFor(VirtReg);
        SpilledOperandsMap[MO->getParent()].push_back(MO);
    for (const auto &MISpilledOperands : SpilledOperandsMap) {
          *MBB, Before, *MISpilledOperands.first, FI, MISpilledOperands.second);
      LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:\n" << *NewDV);
      LLVM_DEBUG(dbgs() << "Cloning debug info due to live out spill\n");
    LRIDbgOperands.clear();

  int FI = getStackSpaceFor(VirtReg);

    if (!TII->isBasicBlockPrologue(*I))

  if (LiveVirtRegs.empty())
    setPhysRegState(Reg, regLiveIn);
      getMBBBeginInsertionPoint(MBB, PrologLiveIns);
  for (const LiveReg &LR : LiveVirtRegs) {
    if (PhysReg == 0 || LR.Error)
    if (RegUnitStates[FirstUnit] == regLiveIn)
           "no reload in start block. Missing vreg def?");
    if (PrologLiveIns.count(PhysReg)) {
      reload(MBB.begin(), LR.VirtReg, PhysReg);
      reload(InsertBefore, LR.VirtReg, PhysReg);
  LiveVirtRegs.clear();
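// Since allocation walks each block from its last instruction to its first,
// any virtual register still live once the top of the block is reached has to
// get its value here: reloadAtBegin marks the block's live-in physregs as
// regLiveIn (those need nothing), and reloads everything else from its stack
// slot right after the block prologue -- or at MBB.begin() when the prologue
// itself touches that physreg (PrologLiveIns).  In the function's entry block
// no such reload should ever be required, which is what the "no reload in
// start block. Missing vreg def?" assertion guards.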
  bool displacedAny = displacePhysReg(MI, Reg);
  setPhysRegState(Reg, regPreAssigned);
  markRegUsedInInstr(Reg);

  bool displacedAny = displacePhysReg(MI, Reg);
  setPhysRegState(Reg, regPreAssigned);

  bool displacedAny = false;
    switch (unsigned VirtReg = RegUnitStates[Unit]) {
      LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
      assert(LRI != LiveVirtRegs.end() && "datastructures in sync");
      reload(ReloadBefore, VirtReg, LRI->PhysReg);
      setPhysRegState(LRI->PhysReg, regFree);
      LRI->Reloaded = true;
      RegUnitStates[Unit] = regFree;

void RegAllocFastImpl::freePhysReg(MCPhysReg PhysReg) {
  switch (unsigned VirtReg = RegUnitStates[FirstUnit]) {
    setPhysRegState(PhysReg, regFree);
    LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
    assert(LRI != LiveVirtRegs.end());
    setPhysRegState(LRI->PhysReg, regFree);

unsigned RegAllocFastImpl::calcSpillCost(MCPhysReg PhysReg) const {
    switch (unsigned VirtReg = RegUnitStates[Unit]) {
      return spillImpossible;
      bool SureSpill = StackSlotForVirtReg[VirtReg] != -1 ||
                       findLiveVirtReg(VirtReg)->LiveOut;
      return SureSpill ? spillClean : spillDirty;
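// The cost model used when hunting for a physreg to displace, as far as this
// excerpt shows: a completely free register costs nothing; a pre-assigned
// unit can never be displaced (spillImpossible, ~0u); a live virtual register
// whose value has to reach memory anyway -- it already owns a stack slot or
// is marked LiveOut, so its def will spill it regardless -- costs only
// spillClean; and displacing a value that would otherwise have stayed in a
// register costs spillDirty.  allocVirtReg below compares these costs, minus
// a spillPrefBonus for hinted registers, and keeps the cheapest candidate.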
void RegAllocFastImpl::assignDanglingDebugValues(MachineInstr &Definition,
  auto UDBGValIter = DanglingDbgValues.find(VirtReg);
  if (UDBGValIter == DanglingDbgValues.end())
    if (!DbgValue->hasDebugOperandForReg(VirtReg))
      if (I->modifiesRegister(Reg, TRI) || --Limit == 0) {

void RegAllocFastImpl::assignVirtToPhysReg(MachineInstr &AtMI, LiveReg &LR,
  assert(LR.PhysReg == 0 && "Already assigned a physreg");
  assert(PhysReg != 0 && "Trying to assign no register");
  LR.PhysReg = PhysReg;
  setPhysRegState(PhysReg, VirtReg);
  assignDanglingDebugValues(AtMI, VirtReg, PhysReg);

  static const unsigned ChainLengthLimit = 3;
    if (Reg.isPhysical())
  } while (++C <= ChainLengthLimit);

  static const unsigned DefLimit = 3;
      Reg = traceCopyChain(Reg);

                                      Register Hint0, bool LookAtPhysRegUses) {
  const Register VirtReg = LR.VirtReg;
                    << " in class " << TRI->getRegClassName(&RC)
      !isRegUsedInInstr(Hint0, LookAtPhysRegUses)) {
    if (isPhysRegFree(Hint0)) {
      assignVirtToPhysReg(MI, LR, Hint0);
  Register Hint1 = traceCopies(VirtReg);
      !isRegUsedInInstr(Hint1, LookAtPhysRegUses)) {
    if (isPhysRegFree(Hint1)) {
      assignVirtToPhysReg(MI, LR, Hint1);
  unsigned BestCost = spillImpossible;
    if (isRegUsedInInstr(PhysReg, LookAtPhysRegUses)) {
    unsigned Cost = calcSpillCost(PhysReg);
      assignVirtToPhysReg(MI, LR, PhysReg);
    if (PhysReg == Hint0 || PhysReg == Hint1)
      Cost -= spillPrefBonus;
    if (Cost < BestCost) {
    LR.PhysReg = getErrorAssignment(LR, MI, RC);
  displacePhysReg(MI, BestReg);
  assignVirtToPhysReg(MI, LR, BestReg);
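// Assignment order in allocVirtReg, as visible above: first the hint passed
// in by the caller (Hint0, typically the other operand of a copy), then a
// hint recovered by walking copy chains (traceCopies / traceCopyChain, both
// capped by small limits), and only then the register class's allocation
// order, scoring every candidate with calcSpillCost and taking a zero-cost
// register immediately.  If nothing is free, the cheapest candidate is
// evicted with displacePhysReg; if every candidate is spillImpossible, the
// allocator falls back to getErrorAssignment, which records the failure (and
// emits a diagnostic) but still returns some register so compilation can
// continue.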
  if (!shouldAllocateRegister(VirtReg))
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
    PhysReg = LRI->PhysReg;
      PhysReg = getErrorAssignment(*LRI, *MO.getParent(), RC);
  if (SubRegIdx != 0) {
    PhysReg = TRI->getSubReg(PhysReg, SubRegIdx);

  if (!shouldAllocateRegister(VirtReg))
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end()) {
    if (PrevReg != 0 && isRegUsedInInstr(PrevReg, true)) {
                        << " (tied/earlyclobber resolution)\n");
      freePhysReg(PrevReg);
      allocVirtReg(MI, *LRI, 0, true);
              TII->get(TargetOpcode::COPY), PrevReg)
  return defineVirtReg(MI, OpNum, VirtReg, true);

bool RegAllocFastImpl::defineVirtReg(MachineInstr &MI, unsigned OpNum,
                                     Register VirtReg, bool LookAtPhysRegUses) {
  if (!shouldAllocateRegister(VirtReg))
  LiveRegMap::iterator LRI;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
    if (mayLiveOut(VirtReg)) {
      LRI->LiveOut = true;
  if (LRI->PhysReg == 0) {
    allocVirtReg(MI, *LRI, 0, LookAtPhysRegUses);
    assert((!isRegUsedInInstr(LRI->PhysReg, LookAtPhysRegUses) || LRI->Error) &&
           "TODO: preassign mismatch");
                      << " use existing assignment to "
  if (LRI->Reloaded || LRI->LiveOut) {
    if (!MI.isImplicitDef()) {
                        << " RL: " << LRI->Reloaded << '\n');
      bool Kill = LRI->LastUse == nullptr;
      spill(SpillBefore, VirtReg, PhysReg, Kill, LRI->LiveOut);
      if (MI.getOpcode() == TargetOpcode::INLINEASM_BR) {
        int FI = StackSlotForVirtReg[VirtReg];
      LRI->LastUse = nullptr;
  LRI->LiveOut = false;
  LRI->Reloaded = false;
  if (MI.getOpcode() == TargetOpcode::BUNDLE) {
    BundleVirtRegsMap[VirtReg] = PhysReg;
  markRegUsedInInstr(PhysReg);
  return setPhysReg(MI, MO, PhysReg);
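// Defs are where spills are emitted in this scheme: because the block is
// processed bottom-up, by the time defineVirtReg reaches the instruction that
// defines VirtReg it already knows whether the value was reloaded further
// down (Reloaded) or leaves the block (LiveOut); if so, a store to the stack
// slot is inserted right after the def (SpillBefore), and both flags are
// reset so the bookkeeping above the def starts clean.  The INLINEASM_BR case
// hints at additional stores so the value is also available along that
// instruction's indirect edges, but only the stack-slot lookup survives in
// this excerpt.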
  if (!shouldAllocateRegister(VirtReg))
  LiveRegMap::iterator LRI;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
    if (mayLiveOut(VirtReg)) {
      LRI->LiveOut = true;
    assert((!MO.isKill() || LRI->LastUse == &MI) && "Invalid kill flag");
  if (LRI->PhysReg == 0) {
    if (MI.isCopy() && MI.getOperand(1).getSubReg() == 0) {
      Hint = MI.getOperand(0).getReg();
        assert(!shouldAllocateRegister(Hint));
               "Copy destination should already be assigned");
    allocVirtReg(MI, *LRI, Hint, false);
  if (MI.getOpcode() == TargetOpcode::BUNDLE) {
    BundleVirtRegsMap[VirtReg] = LRI->PhysReg;
  markRegUsedInInstr(LRI->PhysReg);
  return setPhysReg(MI, MO, LRI->PhysReg);
MCPhysReg RegAllocFastImpl::getErrorAssignment(const LiveReg &LR,
        MachineFunctionProperties::Property::FailedRegAlloc);
    MF.getProperties().set(MachineFunctionProperties::Property::FailedRegAlloc);
          "no registers from class available to allocate", Fn,
    assert(!RawRegs.empty() && "register classes cannot have no registers");
    return RawRegs.front();
  if (!LR.Error && EmitError) {
    if (MI.isInlineAsm()) {
      MI.emitInlineAsmError(
          "inline assembly requires more registers than available");
          "ran out of registers during register allocation", Fn,

      MI.addRegisterKilled(PhysReg, TRI, true);
      MI.addRegisterDead(PhysReg, TRI, true);
    MI.addRegisterDefined(PhysReg, TRI);
void RegAllocFastImpl::dumpState() const {
  for (unsigned Unit = 1, UnitE = TRI->getNumRegUnits(); Unit != UnitE;
    switch (unsigned VirtReg = RegUnitStates[Unit]) {
    case regPreAssigned:
      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "have LiveVirtRegs entry");
      if (I->LiveOut || I->Reloaded) {
      assert(TRI->hasRegUnit(I->PhysReg, Unit) && "inverse mapping present");
  for (const LiveReg &LR : LiveVirtRegs) {
      assert(RegUnitStates[Unit] == VirtReg && "inverse map valid");

void RegAllocFastImpl::addRegClassDefCounts(
  assert(RegClassDefCounts.size() == TRI->getNumRegClasses());
  if (Reg.isVirtual()) {
    if (!shouldAllocateRegister(Reg))
    for (unsigned RCIdx = 0, RCIdxEnd = TRI->getNumRegClasses();
         RCIdx != RCIdxEnd; ++RCIdx) {
        ++RegClassDefCounts[RCIdx];
    for (unsigned RCIdx = 0, RCIdxEnd = TRI->getNumRegClasses();
         RCIdx != RCIdxEnd; ++RCIdx) {
        ++RegClassDefCounts[RCIdx];
void RegAllocFastImpl::findAndSortDefOperandIndexes(const MachineInstr &MI) {
  DefOperandIndexes.clear();
  for (unsigned I = 0, E = MI.getNumOperands(); I < E; ++I) {
    if (Reg.isPhysical()) {
        markPhysRegUsedInInstr(Reg);
    if (MO.isDef() && Reg.isVirtual() && shouldAllocateRegister(Reg))
  if (DefOperandIndexes.size() <= 1)
      addRegClassDefCounts(RegClassDefCounts, MO.getReg());
  llvm::sort(DefOperandIndexes, [&](unsigned I0, unsigned I1) {
    unsigned ClassSize0 = RegClassInfo.getOrder(&RC0).size();
    unsigned ClassSize1 = RegClassInfo.getOrder(&RC1).size();
    bool SmallClass0 = ClassSize0 < RegClassDefCounts[RC0.getID()];
    bool SmallClass1 = ClassSize1 < RegClassDefCounts[RC1.getID()];
    if (SmallClass0 > SmallClass1)
    if (SmallClass0 < SmallClass1)
    if (Livethrough0 > Livethrough1)
    if (Livethrough0 < Livethrough1)
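    // The comparator sorts the def operands so the hardest ones are assigned
    // first: defs whose register class could be exhausted by this single
    // instruction (ClassSize < RegClassDefCounts for that class) win over
    // roomier classes, and within the same bucket the "livethrough" defs --
    // computed just above this fragment, presumably early-clobber, tied or
    // partial definitions whose range overlaps the instruction's uses -- come
    // before plain defs.  Remaining ties presumably fall back to operand
    // order.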
    unsigned TiedIdx = MI.findTiedOperandIdx(MI.getOperandNo(&MO));
  BundleVirtRegsMap.clear();
  bool HasPhysRegUse = false;
  bool HasRegMask = false;
  bool HasVRegDef = false;
  bool HasDef = false;
  bool HasEarlyClobber = false;
  bool NeedToAssignLiveThroughs = false;
    if (Reg.isVirtual()) {
      if (!shouldAllocateRegister(Reg))
          HasEarlyClobber = true;
          NeedToAssignLiveThroughs = true;
          NeedToAssignLiveThroughs = true;
    } else if (Reg.isPhysical()) {
      if (!MRI->isReserved(Reg)) {
          bool displacedAny = definePhysReg(MI, Reg);
            HasEarlyClobber = true;
          HasPhysRegUse = true;
    bool ReArrangedImplicitOps = true;
    if (NeedToAssignLiveThroughs) {
      while (ReArrangedImplicitOps) {
        ReArrangedImplicitOps = false;
        findAndSortDefOperandIndexes(MI);
        for (unsigned OpIdx : DefOperandIndexes) {
            ReArrangedImplicitOps = defineLiveThroughVirtReg(MI, OpIdx, Reg);
            ReArrangedImplicitOps = defineVirtReg(MI, OpIdx, Reg);
          if (ReArrangedImplicitOps)
      while (ReArrangedImplicitOps) {
        ReArrangedImplicitOps = false;
          if (Reg.isVirtual()) {
            ReArrangedImplicitOps =
                defineVirtReg(MI, MI.getOperandNo(&MO), Reg);
            if (ReArrangedImplicitOps)
               "tied def assigned to clobbered register");
      if (Reg.isVirtual()) {
        assert(!shouldAllocateRegister(Reg));
      if (MRI->isReserved(Reg))
        unmarkRegUsedInInstr(Reg);
    for (const auto *RM : RegMasks)
      MRI->addPhysRegsUsedFromRegMask(RM);
    for (const LiveReg &LR : LiveVirtRegs) {
      if (PhysReg != 0 && isClobberedByRegMasks(PhysReg))
        displacePhysReg(MI, PhysReg);
  if (HasPhysRegUse) {
      if (!Reg.isPhysical())
      if (MRI->isReserved(Reg))
      if (!usePhysReg(MI, Reg))
  bool HasUndefUse = false;
  bool ReArrangedImplicitMOs = true;
  while (ReArrangedImplicitMOs) {
    ReArrangedImplicitMOs = false;
      if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
      ReArrangedImplicitMOs = useVirtReg(MI, MO, Reg);
      if (ReArrangedImplicitMOs)
      if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
      assert(MO.isUndef() && "Should only have undef virtreg uses left");
      allocVirtRegUndef(MO);
  if (HasEarlyClobber) {
      assert(!MO.getSubReg() && "should be already handled in def processing");
      if (Reg.isVirtual()) {
        assert(!shouldAllocateRegister(Reg));
      assert(Reg.isPhysical() && "should have register assigned");
      if (MI.readsRegister(Reg, TRI))
  if (MI.isCopy() && MI.getOperand(0).getReg() == MI.getOperand(1).getReg() &&
      MI.getNumOperands() == 2) {
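// Per-instruction shape, pieced together from the fragments above: one scan
// over the operands classifies the instruction (virtual defs, physreg
// defs/uses, reg masks, early clobbers); defs are then assigned -- in the
// order chosen by findAndSortDefOperandIndexes when livethrough operands make
// the order matter -- with the surrounding while loops restarting whenever a
// define call had to rearrange implicit operands; regmask-clobbered
// assignments are displaced; physreg uses are pinned via usePhysReg; virtual
// uses are assigned by useVirtReg, with undef uses handled last through
// allocVirtRegUndef; early-clobber defs are then rechecked against the uses;
// and a register-to-self COPY left over at the end is recorded as coalesced
// (it is presumably erased by allocateBasicBlock, which feeds NumCoalesced).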
  assert(MI.isDebugValue() && "not a DBG_VALUE*");
  for (const auto &MO : MI.debug_operands()) {
    if (!Reg.isVirtual())
    if (!shouldAllocateRegister(Reg))
      int SS = StackSlotForVirtReg[Reg];
  LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
  if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
    for (auto &RegMO : DbgOps)
      setPhysReg(MI, *RegMO, LRI->PhysReg);
    DanglingDbgValues[Reg].push_back(&MI);
  LiveDbgValueMap[Reg].append(DbgOps.begin(), DbgOps.end());

  while (BundledMI->isBundledWithPred()) {
    if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
    DI = BundleVirtRegsMap.find(Reg);
    assert(DI != BundleVirtRegsMap.end() && "Unassigned virtual register");
    setPhysReg(MI, MO, DI->second);
  PosIndexes.unsetInitialized();
  RegUnitStates.assign(TRI->getNumRegUnits(), regFree);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");
    setPhysRegState(LiveReg.PhysReg, regPreAssigned);
    if (MI.isDebugValue()) {
      handleDebugValue(MI);
    allocateInstruction(MI);
    if (MI.getOpcode() == TargetOpcode::BUNDLE) {
  LLVM_DEBUG(dbgs() << "Loading live registers at begin of block.\n");
  NumCoalesced += Coalesced.size();
  for (auto &UDBGPair : DanglingDbgValues) {
      if (!DbgValue->hasDebugOperandForReg(UDBGPair.first))
  DanglingDbgValues.clear();
  LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
                    << "********** Function: " << MF.getName() << '\n');
  MRI->freezeReservedRegs();
  unsigned NumRegUnits = TRI->getNumRegUnits();
  UsedInInstr.assign(NumRegUnits, 0);
  unsigned NumVirtRegs = MRI->getNumVirtRegs();
  StackSlotForVirtReg.resize(NumVirtRegs);
  LiveVirtRegs.setUniverse(NumVirtRegs);
  MayLiveAcrossBlocks.clear();
  MayLiveAcrossBlocks.resize(NumVirtRegs);
    allocateBasicBlock(MBB);
  if (ClearVirtRegs) {
    MRI->clearVirtRegs();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
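// Driver summary: runOnMachineFunction freezes the reserved-register set,
// sizes the per-regunit and per-virtreg structures (UsedInInstr,
// StackSlotForVirtReg, LiveVirtRegs, MayLiveAcrossBlocks), then allocates one
// basic block at a time.  Only when ClearVirtRegs is set -- i.e. when this is
// not a filtered, partial run over a subset of the registers -- are the
// virtual-register maps dropped from MRI so that later passes see nothing but
// physical registers.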
  bool Changed = Impl.runOnMachineFunction(MF);

  bool PrintFilterName = Opts.FilterName != "all";
  bool PrintSemicolon = PrintFilterName && PrintNoClearVRegs;
  OS << "regallocfast";
  if (PrintFilterName || PrintNoClearVRegs) {
    if (PrintFilterName)
    if (PrintNoClearVRegs)
      OS << "no-clear-vregs";

                                      bool ClearVirtRegs) {
  return new RegAllocFast(Ftor, ClearVirtRegs);