52 #define DEBUG_TYPE "regalloc"
// Statistics counters reported by the greedy register allocator.
54 STATISTIC(NumGlobalSplits,
"Number of split global live ranges");
55 STATISTIC(NumLocalSplits,
"Number of split local live ranges");
56 STATISTIC(NumEvicted,
"Number of interferences evicted");
// Command-line option descriptions (cl::opt declarations; the option
// names, types and defaults are elided in this excerpt).
60 cl::desc(
"Spill mode for splitting live ranges"),
69 cl::desc(
"Last chance recoloring max depth"),
74 cl::desc(
"Last chance recoloring maximum number of considered"
75 " interference at a time"),
80 cl::desc(
"Exhaustive Search for registers bypassing the depth "
81 "and interference cutoffs of last chance recoloring"));
85 cl::desc(
"Local reassignment can yield better allocation decisions, but "
86 "may be compile time intensive"),
92 cl::desc(
"Cost for first time use of callee-saved register."),
// Max-heap of (priority, ~virtual-register) pairs; see enqueue() below
// for how the pair is built.
103 typedef std::priority_queue<std::pair<unsigned, unsigned> > PQueue;
125 std::unique_ptr<Spiller> SpillerInstance;
// Next cascade number to hand out to an evicting live range; see
// canEvictInterference()/evictInterference() below.
127 unsigned NextCascade;
// Allocation stages a live range moves through (RS_New, RS_Assign,
// RS_Split, RS_Split2, RS_Spill, RS_Done appear in this excerpt).
142 enum LiveRangeStage {
182 static const char *
const StageName[];
// Per-virtual-register bookkeeping: current stage plus cascade number.
187 LiveRangeStage Stage;
192 RegInfo() : Stage(RS_New), Cascade(0) {}
// Read the stage recorded for VirtReg in the ExtraRegInfo side table.
197 LiveRangeStage getStage(
const LiveInterval &VirtReg)
const {
198 return ExtraRegInfo[VirtReg.
reg].Stage;
// Set the stage of one register, growing the side table to cover all
// current virtual registers first.
201 void setStage(
const LiveInterval &VirtReg, LiveRangeStage Stage) {
202 ExtraRegInfo.
resize(MRI->getNumVirtRegs());
203 ExtraRegInfo[VirtReg.
reg].Stage = Stage;
// Bulk version: advance every RS_New register in [Begin, End) to
// NewStage; registers already past RS_New are left untouched.
206 template<
typename Iterator>
207 void setStage(Iterator Begin, Iterator End, LiveRangeStage NewStage) {
208 ExtraRegInfo.resize(MRI->getNumVirtRegs());
209 for (;Begin != End; ++Begin) {
210 unsigned Reg = *Begin;
211 if (ExtraRegInfo[Reg].Stage == RS_New)
212 ExtraRegInfo[
Reg].Stage = NewStage;
// Cost of evicting interference: number of broken hints first, then
// the maximum spill weight, compared lexicographically via std::tie.
217 struct EvictionCost {
218 unsigned BrokenHints;
221 EvictionCost(): BrokenHints(0), MaxWeight(0) {}
// BrokenHints == ~0u is the "infinite cost" sentinel.
223 bool isMax()
const {
return BrokenHints == ~0u; }
225 void setMax() { BrokenHints = ~0u; }
227 void setBrokenHints(
unsigned NHints) { BrokenHints = NHints; }
229 bool operator<(
const EvictionCost &O)
const {
230 return std::tie(BrokenHints, MaxWeight) <
231 std::tie(O.BrokenHints, O.MaxWeight);
236 std::unique_ptr<SplitAnalysis> SA;
237 std::unique_ptr<SplitEditor> SE;
// One candidate for a region split: a physreg (0 for the compact
// region) plus the edge bundles where the value would live in it.
246 struct GlobalSplitCandidate {
263 Intf.setPhysReg(Cache, Reg);
265 ActiveBlocks.clear();
// getBundles fragment: walk all set bits of LiveBundles; bundles still
// marked NoCand in B get claimed by this candidate (body elided).
271 for (
int i = LiveBundles.find_first(); i >= 0;
272 i = LiveBundles.find_next(i))
273 if (B[i] == NoCand) {
// Sentinel bundle owner meaning "no candidate".
286 enum :
unsigned { NoCand = ~0u };
297 bool EnableLocalReassign;
306 const char* getPassName()
const override {
307 return "Greedy Register Allocator";
312 void releaseMemory()
override;
313 Spiller &spiller()
override {
return *SpillerInstance; }
326 SmallVirtRegSet &,
unsigned = 0);
// LiveRangeEdit delegate callbacks; definitions appear further below.
328 bool LRE_CanEraseVirtReg(
unsigned)
override;
329 void LRE_WillShrinkVirtReg(
unsigned)
override;
330 void LRE_DidCloneVirtReg(
unsigned,
unsigned)
override;
// Splitting/eviction helper declarations (several are elided).
337 void growRegion(GlobalSplitCandidate &Cand);
339 bool calcCompactRegion(GlobalSplitCandidate&);
342 unsigned canReassign(
LiveInterval &VirtReg,
unsigned PhysReg);
344 bool canEvictInterference(
LiveInterval&,
unsigned,
bool, EvictionCost&);
347 bool mayRecolorAllInterferences(
unsigned PhysReg,
LiveInterval &VirtReg,
348 SmallLISet &RecoloringCandidates,
349 const SmallVirtRegSet &FixedRegisters);
358 unsigned calculateRegionSplitCost(
LiveInterval &VirtReg,
361 unsigned &NumCands,
bool IgnoreCSR);
363 unsigned doRegionSplit(
LiveInterval &VirtReg,
unsigned BestCand,
369 unsigned PhysReg,
unsigned &CostPerUseLimit,
371 void initializeCSRCost();
382 SmallVirtRegSet &,
unsigned);
384 SmallVirtRegSet &,
unsigned);
386 void tryHintsRecoloring();
// HintInfo constructor fragment: (frequency, reg, physreg) triple used
// by the hint-recoloring code below.
398 : Freq(Freq), Reg(Reg), PhysReg(PhysReg) {}
402 void collectHintInfo(
unsigned, HintsInfo &);
404 bool isUnusedCalleeSavedReg(
unsigned PhysReg)
const;
// Printable names for the LiveRangeStage values (initializer elided).
411 const char *
const RAGreedy::StageName[] = {
// createGreedyRegisterAllocator fragment.
427 return new RAGreedy();
// getAnalysisUsage fragment: chain to the base class.
470 MachineFunctionPass::getAnalysisUsage(AU);
// LiveRangeEdit delegate: called before a virtual register is erased.
// If the register is currently assigned, notify aboutToRemoveInterval()
// (surrounding logic elided in this excerpt).
478 bool RAGreedy::LRE_CanEraseVirtReg(
unsigned VirtReg) {
479 if (VRM->hasPhys(VirtReg)) {
482 aboutToRemoveInterval(LI);
// Delegate: a register is about to shrink; unassigned registers need
// no handling (body partially elided).
490 void RAGreedy::LRE_WillShrinkVirtReg(
unsigned VirtReg) {
491 if (!VRM->hasPhys(VirtReg))
// Delegate: on cloning, the original drops back to RS_Assign and the
// clone inherits its extra info.
500 void RAGreedy::LRE_DidCloneVirtReg(
unsigned New,
unsigned Old) {
502 if (!ExtraRegInfo.inBounds(Old))
509 ExtraRegInfo[Old].Stage = RS_Assign;
510 ExtraRegInfo.grow(New);
511 ExtraRegInfo[New] = ExtraRegInfo[Old];
// Drop all per-function allocator state.
514 void RAGreedy::releaseMemory() {
515 SpillerInstance.reset();
516 ExtraRegInfo.clear();
// Push a live interval onto the global allocation queue.
520 void RAGreedy::enqueue(
LiveInterval *LI) { enqueue(Queue, LI); }
// Compute an allocation priority for LI and push it onto CurQueue.
// Larger Prio values come out of the max-heap first; the register
// number is bit-inverted so ties favor lower-numbered registers.
522 void RAGreedy::enqueue(PQueue &CurQueue,
LiveInterval *LI) {
525 const unsigned Size = LI->
getSize();
526 const unsigned Reg = LI->
reg;
527 assert(TargetRegisterInfo::isVirtualRegister(Reg) &&
528 "Can only enqueue virtual registers");
// Fresh registers enter the RS_Assign stage on first enqueue.
531 ExtraRegInfo.grow(Reg);
532 if (ExtraRegInfo[Reg].Stage == RS_New)
533 ExtraRegInfo[
Reg].Stage = RS_Assign;
535 if (ExtraRegInfo[Reg].Stage == RS_Split) {
542 bool ReverseLocal = TRI->reverseLocalAssignment();
// Ranges longer than ~2 instructions per register in the class are
// handled as global even if confined to one block.
544 bool ForceGlobal = !ReverseLocal &&
545 (Size / SlotIndex::InstrDist) > (2 * RC.
getNumRegs());
547 if (ExtraRegInfo[Reg].Stage == RS_Assign && !ForceGlobal && !LI->
empty() &&
548 LIS->intervalIsInOneMBB(*LI)) {
558 Prio = Indexes->getZeroIndex().getInstrDistance(LI->
endIndex());
// Global ranges: prioritize by size above a large base offset.
565 Prio = (1u << 29) + Size;
// Registers with a known preference get a boost (exact bit elided).
571 if (VRM->hasKnownPreference(Reg))
576 CurQueue.push(std::make_pair(Prio, ~Reg));
579 LiveInterval *RAGreedy::dequeue() {
return dequeue(Queue); }
// Pop the highest-priority interval; empty-queue handling elided.
582 if (CurQueue.empty())
584 LiveInterval *LI = &LIS->getInterval(~CurQueue.top().second);
// tryAssign fragment (header elided): take the first register in the
// allocation order with no interference.
600 while ((PhysReg = Order.
next()))
601 if (!
Matrix->checkInterference(VirtReg, PhysReg))
603 if (!PhysReg || Order.
isHint())
// A non-hint register is free. If the hinted register can be freed by
// breaking at most one other hint, evict and take the hint instead.
610 if (
unsigned Hint = MRI->getSimpleHint(VirtReg.
reg))
613 EvictionCost MaxCost;
614 MaxCost.setBrokenHints(1);
615 if (canEvictInterference(VirtReg,
Hint,
true, MaxCost)) {
616 evictInterference(VirtReg,
Hint, NewVRegs);
// Otherwise, if PhysReg has a nonzero cost, look for a cheaper
// register via eviction before settling for it.
622 unsigned Cost = TRI->getCostPerUse(PhysReg);
630 unsigned CheapReg = tryEvict(VirtReg, Order, NewVRegs, Cost);
631 return CheapReg ? CheapReg : PhysReg;
// Check whether VirtReg could move off PrevReg onto some other
// register in its allocation order without interference.
639 unsigned RAGreedy::canReassign(
LiveInterval &VirtReg,
unsigned PrevReg) {
642 while ((PhysReg = Order.
next())) {
643 if (PhysReg == PrevReg)
// Probe every register unit of the candidate for interference.
647 for (; Units.isValid(); ++Units) {
650 if (subQ.checkInterference())
// All units clean: PhysReg is a viable new home.
654 if (!Units.isValid())
658 DEBUG(
dbgs() <<
"can reassign: " << VirtReg <<
" from "
// shouldEvict fragment: a range that can still be split may evict a
// hint-respecting range only under conditions elided here.
679 bool CanSplit = getStage(B) < RS_Spill;
683 if (CanSplit && IsHint && !BreaksHint)
// Decide whether all interference on PhysReg can be evicted for
// VirtReg, accumulating an EvictionCost compared against MaxCost.
// IsHint relaxes the hint-related checks.
702 bool RAGreedy::canEvictInterference(
LiveInterval &VirtReg,
unsigned PhysReg,
703 bool IsHint, EvictionCost &MaxCost) {
// Only virtual-register interference can be evicted.
705 if (
Matrix->checkInterference(VirtReg, PhysReg) > LiveRegMatrix::IK_VirtReg)
708 bool IsLocal = LIS->intervalIsInOneMBB(VirtReg);
// A range with no cascade number yet would receive NextCascade if it
// evicts; use that value for the comparison below.
717 unsigned Cascade = ExtraRegInfo[VirtReg.
reg].Cascade;
719 Cascade = NextCascade;
731 assert(TargetRegisterInfo::isVirtualRegister(Intf->
reg) &&
732 "Only expecting virtual register interference from query");
// Fully-allocated (RS_Done) ranges can never be evicted.
734 if (getStage(*Intf) == RS_Done)
744 RegClassInfo.getNumAllocatableRegs(MRI->getRegClass(VirtReg.
reg)) <
745 RegClassInfo.getNumAllocatableRegs(MRI->getRegClass(Intf->
reg)));
// Interference at the same or higher cascade level is penalized
// heavily (+10 broken hints; surrounding logic elided).
747 unsigned IntfCascade = ExtraRegInfo[Intf->
reg].Cascade;
748 if (Cascade <= IntfCascade) {
753 Cost.BrokenHints += 10;
756 bool BreaksHint = VRM->hasPreferredPhys(Intf->
reg);
758 Cost.BrokenHints += BreaksHint;
759 Cost.MaxWeight = std::max(Cost.MaxWeight, Intf->
weight);
// Abort as soon as the accumulated cost is no better than MaxCost.
761 if (!(Cost < MaxCost))
766 if (!shouldEvict(VirtReg, IsHint, *Intf, BreaksHint))
// Local-vs-local eviction is only allowed when the evictee could be
// reassigned elsewhere (and local reassignment is enabled).
771 if (!MaxCost.isMax() && IsLocal && LIS->intervalIsInOneMBB(*Intf) &&
772 (!EnableLocalReassign || !canReassign(*Intf, PhysReg))) {
// Evict every interfering virtual register from PhysReg, stamping the
// evictor with a fresh cascade number and marking evictees with it.
784 void RAGreedy::evictInterference(
LiveInterval &VirtReg,
unsigned PhysReg,
789 unsigned Cascade = ExtraRegInfo[VirtReg.
reg].Cascade;
791 Cascade = ExtraRegInfo[VirtReg.
reg].Cascade = NextCascade++;
794 <<
" interference: Cascade " << Cascade <<
'\n');
// Collect interferers into Intfs first; unassignment below would
// otherwise disturb the live iteration.
802 Intfs.
append(IVR.begin(), IVR.end());
806 for (
unsigned i = 0, e = Intfs.
size(); i != e; ++i) {
// The same interferer can appear for several register units; the
// already-unassigned ones are skipped.
809 if (!VRM->hasPhys(Intf->
reg))
812 assert((ExtraRegInfo[Intf->
reg].Cascade < Cascade ||
814 "Cannot decrease cascade number, illegal eviction");
815 ExtraRegInfo[Intf->
reg].Cascade = Cascade;
// True when PhysReg aliases a callee-saved register that has not been
// used anywhere in the function yet.
823 bool RAGreedy::isUnusedCalleeSavedReg(
unsigned PhysReg)
const {
824 unsigned CSR = RegClassInfo.getLastCalleeSavedAlias(PhysReg);
828 return !
Matrix->isPhysRegUsed(PhysReg);
// tryEvict fragment (header elided): find a register under the given
// cost-per-use limit whose interference is cheapest to evict, evict it
// and return the register (0 if none found).
838 unsigned CostPerUseLimit) {
842 EvictionCost BestCost;
844 unsigned BestPhys = 0;
// With a cost limit in force, only evictions that break no hints and
// displace at most our own weight are acceptable starting points.
849 if (CostPerUseLimit < ~0u) {
850 BestCost.BrokenHints = 0;
851 BestCost.MaxWeight = VirtReg.
weight;
// Bail out when even the cheapest register in the class reaches the
// limit.
855 unsigned MinCost = RegClassInfo.getMinCost(RC);
856 if (MinCost >= CostPerUseLimit) {
857 DEBUG(
dbgs() << TRI->getRegClassName(RC) <<
" minimum cost = " << MinCost
858 <<
", no cheaper registers to be found.\n");
// Restrict the scan to the prefix of the order under the limit.
864 if (TRI->getCostPerUse(Order.
getOrder().
back()) >= CostPerUseLimit) {
865 OrderLimit = RegClassInfo.getLastCostChange(RC);
866 DEBUG(
dbgs() <<
"Only trying the first " << OrderLimit <<
" regs.\n");
871 while (
unsigned PhysReg = Order.
next(OrderLimit)) {
872 if (TRI->getCostPerUse(PhysReg) >= CostPerUseLimit)
// CostPerUseLimit == 1 encodes "avoid first-time callee-saved use"
// (see tryAssignCSRFirstTime below).
876 if (CostPerUseLimit == 1 && isUnusedCalleeSavedReg(PhysReg)) {
878 <<
PrintReg(RegClassInfo.getLastCalleeSavedAlias(PhysReg), TRI)
883 if (!canEvictInterference(VirtReg, PhysReg,
false, BestCost))
897 evictInterference(VirtReg, BestPhys, NewVRegs);
// addSplitConstraints fragment (header elided): build one SpillPlacement
// constraint per use block, accumulate the static spill cost, then seed
// the placer and scan the active bundles.
916 SplitConstraints.resize(UseBlocks.
size());
918 for (
unsigned i = 0; i != UseBlocks.
size(); ++i) {
// Default: prefer a register across each boundary where we are live.
924 BC.
Entry = BI.
LiveIn ? SpillPlacement::PrefReg : SpillPlacement::DontCare;
925 BC.
Exit = BI.
LiveOut ? SpillPlacement::PrefReg : SpillPlacement::DontCare;
// Interference reaching back to the block start forces a spill on
// entry; later interference merely prefers one.
936 if (Intf.
first() <= Indexes->getMBBStartIdx(BC.
Number))
937 BC.
Entry = SpillPlacement::MustSpill, ++Ins;
939 BC.
Entry = SpillPlacement::PrefSpill, ++Ins;
// Symmetric handling for interference at or past the last split point.
946 if (Intf.
last() >= SA->getLastSplitPoint(BC.
Number))
947 BC.
Exit = SpillPlacement::MustSpill, ++Ins;
949 BC.
Exit = SpillPlacement::PrefSpill, ++Ins;
956 StaticCost += SpillPlacer->getBlockFrequency(BC.
Number);
962 SpillPlacer->addConstraints(SplitConstraints);
963 return SpillPlacer->scanActiveBundles();
// addThroughConstraints fragment (header elided): feed constraints for
// live-through blocks in groups of GroupSize, keeping transparent
// blocks (TBS) separate from blocks with interference (BCS).
971 const unsigned GroupSize = 8;
973 unsigned TBS[GroupSize];
974 unsigned B = 0,
T = 0;
976 for (
unsigned i = 0; i != Blocks.
size(); ++i) {
977 unsigned Number = Blocks[i];
981 assert(
T < GroupSize &&
"Array overflow");
// Flush the transparent-block group once it fills up.
983 if (++
T == GroupSize) {
990 assert(B < GroupSize &&
"Array overflow");
// Interference touching a block boundary forces a spill there.
994 if (Intf.
first() <= Indexes->getMBBStartIdx(Number))
995 BCS[B].Entry = SpillPlacement::MustSpill;
997 BCS[B].
Entry = SpillPlacement::PrefSpill;
1000 if (Intf.
last() >= SA->getLastSplitPoint(Number))
1001 BCS[B].Exit = SpillPlacement::MustSpill;
1003 BCS[B].
Exit = SpillPlacement::PrefSpill;
// Flush the constrained-block group once it fills up.
1005 if (++B == GroupSize) {
// Iteratively grow the split region for Cand: follow the placer's
// newly live bundles, activate the through-blocks they touch, feed
// their constraints back, and iterate until a fixed point.
1015 void RAGreedy::growRegion(GlobalSplitCandidate &Cand) {
1017 BitVector Todo = SA->getThroughBlocks();
1019 unsigned AddedTo = 0;
1021 unsigned Visited = 0;
1027 for (
int i = 0, e = NewBundles.
size(); i != e; ++i) {
1028 unsigned Bundle = NewBundles[i];
// Only through-blocks not yet visited are interesting.
1034 if (!Todo.
test(Block))
// No new active blocks: nothing more to feed the placer.
1045 if (ActiveBlocks.
size() == AddedTo)
1050 auto NewBlocks =
makeArrayRef(ActiveBlocks).slice(AddedTo);
1052 addThroughConstraints(Cand.Intf, NewBlocks);
1056 SpillPlacer->addPrefSpill(NewBlocks,
true);
1057 AddedTo = ActiveBlocks.
size();
1060 SpillPlacer->iterate();
// Compute the "compact region": a split candidate with no physical
// register (slot 0) that isolates the range around its uses. Returns
// false when no useful compact region exists.
1072 bool RAGreedy::calcCompactRegion(GlobalSplitCandidate &Cand) {
1074 if (!SA->getNumThroughBlocks())
1078 Cand.reset(IntfCache, 0);
1080 DEBUG(
dbgs() <<
"Compact region bundles");
1084 SpillPlacer->prepare(Cand.LiveBundles);
1088 if (!addSplitConstraints(Cand.Intf, Cost)) {
1094 SpillPlacer->finish();
// No live bundles means the placer found nothing worth keeping.
1096 if (!Cand.LiveBundles.any()) {
1102 for (
int i = Cand.LiveBundles.find_first(); i>=0;
1103 i = Cand.LiveBundles.find_next(i))
1104 dbgs() <<
" EB#" << i;
// calcSpillCost fragment: sum the block frequency for each use block
// (entry/exit conditions elided).
1115 for (
unsigned i = 0; i != UseBlocks.
size(); ++i) {
1119 Cost += SpillPlacer->getBlockFrequency(Number);
1123 Cost += SpillPlacer->getBlockFrequency(Number);
// Estimate the cost of splitting along Cand's bundle assignment:
// register/stack transitions at use-block boundaries plus live-through
// blocks that still see interference.
1132 BlockFrequency RAGreedy::calcGlobalSplitCost(GlobalSplitCandidate &Cand) {
1134 const BitVector &LiveBundles = Cand.LiveBundles;
1136 for (
unsigned i = 0; i != UseBlocks.
size(); ++i) {
// Bundle indices 0/1 select the block's entering/leaving edge bundle
// (EdgeBundles convention).
1139 bool RegIn = LiveBundles[Bundles->getBundle(BC.
Number, 0)];
1140 bool RegOut = LiveBundles[Bundles->getBundle(BC.
Number, 1)];
// Count boundaries where the placement disagrees with our preference.
1144 Ins += RegIn != (BC.
Entry == SpillPlacement::PrefReg);
1146 Ins += RegOut != (BC.
Exit == SpillPlacement::PrefReg);
1148 GlobalCost += SpillPlacer->getBlockFrequency(BC.
Number);
1151 for (
unsigned i = 0, e = Cand.ActiveBlocks.size(); i != e; ++i) {
1152 unsigned Number = Cand.ActiveBlocks[i];
1153 bool RegIn = LiveBundles[Bundles->getBundle(Number, 0)];
1154 bool RegOut = LiveBundles[Bundles->getBundle(Number, 1)];
1155 if (!RegIn && !RegOut)
1157 if (RegIn && RegOut) {
// Live-through with interference costs two transitions (spill and
// reload around the interference).
1159 Cand.Intf.moveToBlock(Number);
1160 if (Cand.Intf.hasInterference()) {
1161 GlobalCost += SpillPlacer->getBlockFrequency(Number);
1162 GlobalCost += SpillPlacer->getBlockFrequency(Number);
// Live-in xor live-out: a single transition in this block.
1167 GlobalCost += SpillPlacer->getBlockFrequency(Number);
// splitAroundRegion fragment (header elided): execute a multi-candidate
// region split. BundleCand maps each edge bundle to its owning
// candidate; UsedCands lists the candidates that received an interval.
1188 const unsigned NumGlobalIntvs = LREdit.
size();
1189 DEBUG(
dbgs() <<
"splitAroundRegion with " << NumGlobalIntvs <<
" globals.\n");
1190 assert(NumGlobalIntvs &&
"No global intervals configured");
1195 unsigned Reg = SA->getParent().reg;
1196 bool SingleInstrs = RegClassInfo.isProperSubClass(MRI->getRegClass(Reg));
// First pass: blocks containing uses of the current range.
1200 for (
unsigned i = 0; i != UseBlocks.
size(); ++i) {
1203 unsigned IntvIn = 0, IntvOut = 0;
// Candidate owning the block's entering bundle, if any.
1206 unsigned CandIn = BundleCand[Bundles->getBundle(Number, 0)];
1207 if (CandIn != NoCand) {
1208 GlobalSplitCandidate &Cand = GlobalCand[CandIn];
1209 IntvIn = Cand.IntvIdx;
1210 Cand.Intf.moveToBlock(Number);
1211 IntfIn = Cand.Intf.first();
// Candidate owning the leaving bundle, if any.
1215 unsigned CandOut = BundleCand[Bundles->getBundle(Number, 1)];
1216 if (CandOut != NoCand) {
1217 GlobalSplitCandidate &Cand = GlobalCand[CandOut];
1218 IntvOut = Cand.IntvIdx;
1219 Cand.Intf.moveToBlock(Number);
1220 IntfOut = Cand.Intf.last();
// No global interval covers this block: fall back to an ordinary
// single-block split when the analysis says it is worthwhile.
1225 if (!IntvIn && !IntvOut) {
1227 if (SA->shouldSplitSingleBlock(BI, SingleInstrs))
1228 SE->splitSingleBlock(BI);
1232 if (IntvIn && IntvOut)
1233 SE->splitLiveThroughBlock(Number, IntvIn, IntfIn, IntvOut, IntfOut);
1235 SE->splitRegInBlock(BI, IntvIn, IntfIn);
1237 SE->splitRegOutBlock(BI, IntvOut, IntfOut);
// Second pass: live-through blocks of every used candidate.
1243 BitVector Todo = SA->getThroughBlocks();
1244 for (
unsigned c = 0; c != UsedCands.
size(); ++c) {
1246 for (
unsigned i = 0, e = Blocks.
size(); i != e; ++i) {
1247 unsigned Number = Blocks[i];
1248 if (!Todo.
test(Number))
1252 unsigned IntvIn = 0, IntvOut = 0;
1255 unsigned CandIn = BundleCand[Bundles->getBundle(Number, 0)];
1256 if (CandIn != NoCand) {
1257 GlobalSplitCandidate &Cand = GlobalCand[CandIn];
1258 IntvIn = Cand.IntvIdx;
1259 Cand.Intf.moveToBlock(Number);
1260 IntfIn = Cand.Intf.first();
1263 unsigned CandOut = BundleCand[Bundles->getBundle(Number, 1)];
1264 if (CandOut != NoCand) {
1265 GlobalSplitCandidate &Cand = GlobalCand[CandOut];
1266 IntvOut = Cand.IntvIdx;
1267 Cand.Intf.moveToBlock(Number);
1268 IntfOut = Cand.Intf.last();
1270 if (!IntvIn && !IntvOut)
1272 SE->splitLiveThroughBlock(Number, IntvIn, IntfIn, IntvOut, IntfOut);
// Finish the edit and classify every new register by stage.
1279 SE->finish(&IntvMap);
1280 DebugVars->splitRegister(Reg, LREdit.
regs(), *LIS);
1282 ExtraRegInfo.
resize(MRI->getNumVirtRegs());
1283 unsigned OrigBlocks = SA->getNumLiveBlocks();
1290 for (
unsigned i = 0, e = LREdit.
size(); i != e; ++i) {
1294 if (getStage(Reg) != RS_New)
// Interval 0 is the remainder: it goes straight to spilling.
1299 if (IntvMap[i] == 0) {
1300 setStage(Reg, RS_Spill);
// A global interval that still covers as many blocks as the original
// made no progress; mark it RS_Split2 so it cannot region-split again.
1306 if (IntvMap[i] < NumGlobalIntvs) {
1307 if (SA->countLiveBlocks(&Reg) >= OrigBlocks) {
1308 DEBUG(
dbgs() <<
"Main interval covers the same " << OrigBlocks
1309 <<
" blocks as original.\n");
1311 setStage(Reg, RS_Split2);
1321 MF->verify(
this,
"After splitting live range around region");
// tryRegionSplit fragment (header elided): evaluate region-split
// candidates (including the compact region) and execute the best one.
1326 unsigned NumCands = 0;
1330 bool HasCompact = calcCompactRegion(GlobalCand.front());
1334 BestCost = BlockFrequency::getMaxFrequency();
// Without a compact region, any split must beat the cost of isolating
// every block (i.e. spilling everywhere).
1338 BestCost = calcSpillCost();
1339 DEBUG(
dbgs() <<
"Cost of isolating all blocks = ";
1340 MBFI->printBlockFreq(
dbgs(), BestCost) <<
'\n');
1344 calculateRegionSplitCost(VirtReg, Order, BestCost, NumCands,
// No beneficial candidate at all: give up on region splitting.
1348 if (!HasCompact && BestCand == NoCand)
1351 return doRegionSplit(VirtReg, BestCand, HasCompact, NewVRegs);
// Evaluate a region-split candidate for each register in Order and
// return the GlobalCand index of the cheapest one, or NoCand.
1354 unsigned RAGreedy::calculateRegionSplitCost(
LiveInterval &VirtReg,
1359 unsigned BestCand = NoCand;
1361 while (
unsigned PhysReg = Order.
next()) {
1362 if (IgnoreCSR && isUnusedCalleeSavedReg(PhysReg))
// The interference cache has a limited number of cursors; discard the
// candidate covering the fewest bundles to make room.
1367 if (NumCands == IntfCache.getMaxCursors()) {
1368 unsigned WorstCount = ~0u;
1370 for (
unsigned i = 0; i != NumCands; ++i) {
1371 if (i == BestCand || !GlobalCand[i].PhysReg)
1373 unsigned Count = GlobalCand[i].LiveBundles.count();
1374 if (Count < WorstCount)
1375 Worst = i, WorstCount = Count;
1378 GlobalCand[Worst] = GlobalCand[NumCands];
1379 if (BestCand == NumCands)
1383 if (GlobalCand.size() <= NumCands)
1384 GlobalCand.resize(NumCands+1);
1385 GlobalSplitCandidate &Cand = GlobalCand[NumCands];
1386 Cand.reset(IntfCache, PhysReg);
1388 SpillPlacer->prepare(Cand.LiveBundles);
1390 if (!addSplitConstraints(Cand.Intf, Cost)) {
1395 MBFI->printBlockFreq(
dbgs(), Cost));
// Static cost alone already exceeds the best: skip this register.
1396 if (Cost >= BestCost) {
1398 if (BestCand == NoCand)
1399 dbgs() <<
" worse than no bundles\n";
1401 dbgs() <<
" worse than "
1402 <<
PrintReg(GlobalCand[BestCand].PhysReg, TRI) <<
'\n';
1408 SpillPlacer->finish();
// A candidate with no live bundles would not split anything.
1411 if (!Cand.LiveBundles.any()) {
1416 Cost += calcGlobalSplitCost(Cand);
1418 dbgs() <<
", total = "; MBFI->printBlockFreq(
dbgs(), Cost)
1420 for (
int i = Cand.LiveBundles.find_first(); i>=0;
1421 i = Cand.LiveBundles.find_next(i))
1422 dbgs() <<
" EB#" << i;
1425 if (Cost < BestCost) {
1426 BestCand = NumCands;
// Execute the chosen region split: hand the bundles to the best
// candidate (and the compact region when HasCompact), then split.
1434 unsigned RAGreedy::doRegionSplit(
LiveInterval &VirtReg,
unsigned BestCand,
1439 LiveRangeEdit LREdit(&VirtReg, NewVRegs, *MF, *LIS, VRM,
this);
// Reset bundle ownership; candidates claim their bundles below.
1443 BundleCand.assign(Bundles->getNumBundles(), NoCand);
1446 if (BestCand != NoCand) {
1447 GlobalSplitCandidate &Cand = GlobalCand[BestCand];
1448 if (
unsigned B = Cand.getBundles(BundleCand, BestCand)) {
1450 Cand.IntvIdx = SE->openIntv();
1452 << B <<
" bundles, intv " << Cand.IntvIdx <<
".\n");
// The compact region (candidate 0) has no physical register.
1459 GlobalSplitCandidate &Cand = GlobalCand.front();
1460 assert(!Cand.PhysReg &&
"Compact region has no physreg");
1461 if (
unsigned B = Cand.getBundles(BundleCand, 0)) {
1463 Cand.IntvIdx = SE->openIntv();
1464 DEBUG(
dbgs() <<
"Split for compact region in " << B <<
" bundles, intv "
1465 << Cand.IntvIdx <<
".\n");
1470 splitAroundRegion(LREdit, UsedCands);
// tryBlockSplit fragment (header elided): split the range into
// per-block pieces wherever a single-block split looks profitable.
1484 assert(&SA->getParent() == &VirtReg &&
"Live range wasn't analyzed");
1485 unsigned Reg = VirtReg.
reg;
1486 bool SingleInstrs = RegClassInfo.isProperSubClass(MRI->getRegClass(Reg));
1487 LiveRangeEdit LREdit(&VirtReg, NewVRegs, *MF, *LIS, VRM,
this);
1490 for (
unsigned i = 0; i != UseBlocks.
size(); ++i) {
1492 if (SA->shouldSplitSingleBlock(BI, SingleInstrs))
1493 SE->splitSingleBlock(BI);
1501 SE->finish(&IntvMap);
1504 DebugVars->splitRegister(Reg, LREdit.
regs(), *LIS);
1506 ExtraRegInfo.
resize(MRI->getNumVirtRegs());
// New intervals that landed in slot 0 (the remainder) go to RS_Spill.
1510 for (
unsigned i = 0, e = LREdit.
size(); i != e; ++i) {
1512 if (getStage(LI) == RS_New && IntvMap[i] == 0)
1513 setStage(LI, RS_Spill);
1517 MF->
verify(
this,
"After splitting live range around basic blocks");
// tryInstructionSplit fragment (header elided): split the range around
// each individual use instruction, skipping full copies that register
// coalescing could remove instead.
1532 assert(SuperRC &&
"Invalid register class");
// Only worthwhile when the register class can be relaxed afterwards.
1554 if (!RegClassInfo.isProperSubClass(CurRC))
1559 LiveRangeEdit LREdit(&VirtReg, NewVRegs, *MF, *LIS, VRM,
this);
1560 SE->reset(LREdit, SplitEditor::SM_Size);
// Nothing to do with fewer than two uses.
1563 if (Uses.
size() <= 1)
1566 DEBUG(
dbgs() <<
"Split around " << Uses.
size() <<
" individual instrs.\n");
1569 TRI->getLargestLegalSuperClass(CurRC, *MF);
1570 unsigned SuperRCNumAllocatableRegs = RCI.getNumAllocatableRegs(SuperRC);
1575 for (
unsigned i = 0; i != Uses.
size(); ++i) {
// Skip full copies (and the elided equal-allocatable-regs case):
// splitting around them buys nothing.
1576 if (
const MachineInstr *
MI = Indexes->getInstructionFromIndex(Uses[i]))
1577 if (
MI->isFullCopy() ||
1578 SuperRCNumAllocatableRegs ==
1581 DEBUG(
dbgs() <<
" skip:\t" << Uses[i] <<
'\t' << *
MI);
// Carve out a tiny interval covering just this one use.
1585 SlotIndex SegStart = SE->enterIntvBefore(Uses[i]);
1586 SlotIndex SegStop = SE->leaveIntvAfter(Uses[i]);
1587 SE->useIntv(SegStart, SegStop);
1590 if (LREdit.
empty()) {
1591 DEBUG(
dbgs() <<
"All uses were copies.\n");
1596 SE->finish(&IntvMap);
1597 DebugVars->splitRegister(VirtReg.
reg, LREdit.
regs(), *LIS);
1598 ExtraRegInfo.
resize(MRI->getNumVirtRegs());
// All pieces go straight to the spill stage.
1601 setStage(LREdit.
begin(), LREdit.
end(), RS_Spill);
// For a local (single-block) interval, compute the maximum interfering
// spill weight in each gap between consecutive uses, assuming the
// range were assigned to PhysReg.
1616 void RAGreedy::calcGapWeights(
unsigned PhysReg,
1618 assert(SA->getUseBlocks().size() == 1 &&
"Not a local interval");
1621 const unsigned NumGaps = Uses.
size()-1;
1625 BI.LiveIn ? BI.FirstInstr.
getBaseIndex() : BI.FirstInstr;
1629 GapWeight.
assign(NumGaps, 0.0f);
// Virtual-register interference, examined per register unit.
1633 if (!
Matrix->query(const_cast<LiveInterval&>(SA->getParent()), *Units)
1634 .checkInterference())
1645 Matrix->getLiveUnions()[*Units] .find(StartIdx);
1646 for (
unsigned Gap = 0; IntI.valid() && IntI.start() < StopIdx; ++IntI) {
// Advance Gap until it overlaps the interference segment.
1648 while (Uses[Gap+1].getBoundaryIndex() < IntI.start())
1649 if (++Gap == NumGaps)
// Record the interferer's weight in every gap the segment spans.
1655 const float weight = IntI.value()->weight;
1656 for (; Gap != NumGaps; ++Gap) {
1657 GapWeight[Gap] = std::max(GapWeight[Gap], weight);
1658 if (Uses[Gap+1].getBaseIndex() >= IntI.stop())
// Fixed (register-unit) interference walks gaps the same way; the
// weight applied is elided in this excerpt.
1668 const LiveRange &LR = LIS->getRegUnit(*Units);
1673 for (
unsigned Gap = 0; I != E && I->start < StopIdx; ++
I) {
1674 while (Uses[Gap+1].getBoundaryIndex() < I->start)
1675 if (++Gap == NumGaps)
1680 for (; Gap != NumGaps; ++Gap) {
1682 if (Uses[Gap+1].getBaseIndex() >= I->end)
// tryLocalSplit fragment (header elided): split a single-block interval
// between two of its uses, choosing the window with the best estimated
// benefit.
1696 assert(SA->getUseBlocks().size() == 1 &&
"Not a local interval");
// At least three uses are needed for an interior gap worth splitting.
1707 if (Uses.
size() <= 2)
1709 const unsigned NumGaps = Uses.
size()-1;
1712 dbgs() <<
"tryLocalSplit: ";
1713 for (
unsigned i = 0, e = Uses.
size(); i != e; ++i)
1714 dbgs() <<
' ' << Uses[i];
// Record which gaps contain a register-mask clobber; those gaps are
// penalized later for registers the mask clobbers.
1721 if (
Matrix->checkRegMaskInterference(VirtReg)) {
1726 unsigned ri = std::lower_bound(RMS.
begin(), RMS.
end(),
1728 unsigned re = RMS.
size();
1729 for (
unsigned i = 0; i != NumGaps && ri != re; ++i) {
1731 assert(!SlotIndex::isEarlierInstr(RMS[ri], Uses[i]));
1732 if (SlotIndex::isEarlierInstr(Uses[i+1], RMS[ri]))
1736 if (SlotIndex::isSameInstr(Uses[i+1], RMS[ri]) && i+1 == NumGaps)
1738 DEBUG(
dbgs() <<
' ' << RMS[ri] <<
':' << Uses[i] <<
'-' << Uses[i+1]);
1739 RegMaskGaps.push_back(i);
1742 while (ri != re && SlotIndex::isEarlierInstr(RMS[ri], Uses[i+1]))
// RS_Split2 ranges must strictly shrink or we would loop forever.
1766 bool ProgressRequired = getStage(VirtReg) >= RS_Split2;
1769 unsigned BestBefore = NumGaps;
1770 unsigned BestAfter = 0;
// Block frequency normalized to the function entry frequency.
1773 const float blockFreq =
1774 SpillPlacer->getBlockFrequency(BI.MBB->getNumber()).getFrequency() *
1775 (1.0f / MBFI->getEntryFreq());
1779 while (
unsigned PhysReg = Order.
next()) {
1782 calcGapWeights(PhysReg, GapWeight);
// Apply the regmask penalty for this register (value elided).
1785 if (
Matrix->checkRegMaskInterference(VirtReg, PhysReg))
1786 for (
unsigned i = 0, e = RegMaskGaps.size(); i != e; ++i)
// Slide a [SplitBefore, SplitAfter] window over the gaps, tracking
// the maximum interference weight inside it.
1793 unsigned SplitBefore = 0, SplitAfter = 1;
1797 float MaxGap = GapWeight[0];
1801 const bool LiveBefore = SplitBefore != 0 || BI.LiveIn;
1802 const bool LiveAfter = SplitAfter != NumGaps || BI.LiveOut;
1805 << Uses[SplitBefore] <<
'-' << Uses[SplitAfter]
1806 <<
" i=" << MaxGap);
// An empty window (dead on both sides) is useless.
1809 if (!LiveBefore && !LiveAfter) {
1817 unsigned NewGaps = LiveBefore + SplitAfter - SplitBefore + LiveAfter;
1820 bool Legal = !ProgressRequired || NewGaps < NumGaps;
1829 blockFreq * (NewGaps + 1),
1830 Uses[SplitBefore].distance(Uses[SplitAfter]) +
1831 (LiveBefore + LiveAfter) * SlotIndex::InstrDist,
// Keep the window with the largest estimated benefit so far.
1838 float Diff = EstWeight - MaxGap;
1839 if (Diff > BestDiff) {
1842 BestBefore = SplitBefore;
1843 BestAfter = SplitAfter;
// Try shrinking the window from the left; recompute MaxGap when the
// dropped gap may have been the maximum...
1850 if (++SplitBefore < SplitAfter) {
1853 if (GapWeight[SplitBefore - 1] >= MaxGap) {
1854 MaxGap = GapWeight[SplitBefore];
1855 for (
unsigned i = SplitBefore + 1; i != SplitAfter; ++i)
1856 MaxGap = std::max(MaxGap, GapWeight[i]);
// ... then extend it to the right.
1864 if (SplitAfter >= NumGaps) {
1870 MaxGap = std::max(MaxGap, GapWeight[SplitAfter++]);
// No profitable window was found for any register.
1875 if (BestBefore == NumGaps)
1878 DEBUG(
dbgs() <<
"Best local split range: " << Uses[BestBefore]
1879 <<
'-' << Uses[BestAfter] <<
", " << BestDiff
1880 <<
", " << (BestAfter - BestBefore + 1) <<
" instrs\n");
// Perform the chosen split.
1882 LiveRangeEdit LREdit(&VirtReg, NewVRegs, *MF, *LIS, VRM,
this);
1886 SlotIndex SegStart = SE->enterIntvBefore(Uses[BestBefore]);
1887 SlotIndex SegStop = SE->leaveIntvAfter(Uses[BestAfter]);
1888 SE->useIntv(SegStart, SegStop);
1890 SE->finish(&IntvMap);
1891 DebugVars->splitRegister(VirtReg.
reg, LREdit.
regs(), *LIS);
// If the split did not reduce the gap count, tag the new main pieces
// RS_Split2 so the next round must make progress.
1896 bool LiveBefore = BestBefore != 0 || BI.LiveIn;
1897 bool LiveAfter = BestAfter != NumGaps || BI.LiveOut;
1898 unsigned NewGaps = LiveBefore + BestAfter - BestBefore + LiveAfter;
1899 if (NewGaps >= NumGaps) {
1900 DEBUG(
dbgs() <<
"Tagging non-progress ranges: ");
1901 assert(!ProgressRequired &&
"Didn't make progress when it was required.");
1902 for (
unsigned i = 0, e = IntvMap.
size(); i != e; ++i)
1903 if (IntvMap[i] == 1) {
1904 setStage(LIS->getInterval(LREdit.
get(i)), RS_Split2);
// trySplit fragment (header elided): dispatch to the right splitter
// based on the range's stage and shape.
1924 if (getStage(VirtReg) >= RS_Spill)
// Local intervals: gap splitting first, per-instruction as fallback.
1928 if (LIS->intervalIsInOneMBB(VirtReg)) {
1930 SA->analyze(&VirtReg);
1931 unsigned PhysReg = tryLocalSplit(VirtReg, Order, NewVRegs);
1932 if (PhysReg || !NewVRegs.
empty())
1934 return tryInstructionSplit(VirtReg, Order, NewVRegs);
1939 SA->analyze(&VirtReg);
// The analysis may have repaired the range; retry a plain assignment
// with fresh interference information.
1945 if (SA->didRepairRange()) {
1947 Matrix->invalidateVirtRegs();
1948 if (
unsigned PhysReg = tryAssign(VirtReg, Order, NewVRegs))
// Region splitting is only tried before the RS_Split2 stage.
1955 if (getStage(VirtReg) < RS_Split2) {
1956 unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
1957 if (PhysReg || !NewVRegs.
empty())
1962 return tryBlockSplit(VirtReg, Order, NewVRegs);
// Check that every interference on PhysReg could itself be recolored:
// none may be a same-class RS_Done range or already fixed, and the
// interference count must stay under the recoloring cutoff.
1978 RAGreedy::mayRecolorAllInterferences(
unsigned PhysReg,
LiveInterval &VirtReg,
1979 SmallLISet &RecoloringCandidates,
1980 const SmallVirtRegSet &FixedRegisters) {
1989 DEBUG(
dbgs() <<
"Early abort: too many interferences.\n");
1990 CutOffInfo |= CO_Interf;
1997 if ((getStage(*Intf) == RS_Done &&
1998 MRI->getRegClass(Intf->
reg) == CurRC) ||
1999 FixedRegisters.count(Intf->
reg)) {
2000 DEBUG(
dbgs() <<
"Early abort: the inteference is not recolorable.\n");
2003 RecoloringCandidates.insert(Intf);
// Last-chance recoloring: temporarily take some register, evicting its
// interference, and try to recolor the evictees recursively; roll
// everything back if that fails.
2048 unsigned RAGreedy::tryLastChanceRecoloring(
LiveInterval &VirtReg,
2051 SmallVirtRegSet &FixedRegisters,
2053 DEBUG(
dbgs() <<
"Try last chance recoloring for " << VirtReg <<
'\n');
2055 assert((getStage(VirtReg) >= RS_Done || !VirtReg.
isSpillable()) &&
2056 "Last chance recoloring should really be last chance");
// A depth cutoff guards against runaway recursion.
2062 DEBUG(
dbgs() <<
"Abort because max depth has been reached.\n");
2063 CutOffInfo |= CO_Depth;
2068 SmallLISet RecoloringCandidates;
2077 while (
unsigned PhysReg = Order.
next()) {
2078 DEBUG(
dbgs() <<
"Try to assign: " << VirtReg <<
" to "
2079 <<
PrintReg(PhysReg, TRI) <<
'\n');
2080 RecoloringCandidates.clear();
2081 VirtRegToPhysReg.
clear();
// Physical-register interference cannot be recolored away.
2084 if (
Matrix->checkInterference(VirtReg, PhysReg) >
2085 LiveRegMatrix::IK_VirtReg) {
2086 DEBUG(
dbgs() <<
"Some inteferences are not with virtual registers.\n");
2093 if (!mayRecolorAllInterferences(PhysReg, VirtReg, RecoloringCandidates,
2095 DEBUG(
dbgs() <<
"Some inteferences cannot be recolored.\n");
// Queue the candidates and remember each one's current assignment so
// a failed attempt can be rolled back.
2102 PQueue RecoloringQueue;
2103 for (SmallLISet::iterator It = RecoloringCandidates.begin(),
2104 EndIt = RecoloringCandidates.end();
2105 It != EndIt; ++It) {
2106 unsigned ItVirtReg = (*It)->reg;
2107 enqueue(RecoloringQueue, *It);
2108 assert(VRM->hasPhys(ItVirtReg) &&
2109 "Interferences are supposed to be with allocated vairables");
2112 VirtRegToPhysReg[ItVirtReg] = VRM->getPhys(ItVirtReg);
// Optimistically take PhysReg, then try to re-place the evictees.
2120 Matrix->assign(VirtReg, PhysReg);
2125 SmallVirtRegSet SaveFixedRegisters(FixedRegisters);
2126 if (tryRecoloringCandidates(RecoloringQueue, NewVRegs, FixedRegisters,
2130 Matrix->unassign(VirtReg);
2134 DEBUG(
dbgs() <<
"Fail to assign: " << VirtReg <<
" to "
2135 <<
PrintReg(PhysReg, TRI) <<
'\n');
// Rollback: restore the fixed set and every original assignment.
2138 FixedRegisters = SaveFixedRegisters;
2139 Matrix->unassign(VirtReg);
2141 for (SmallLISet::iterator It = RecoloringCandidates.begin(),
2142 EndIt = RecoloringCandidates.end();
2143 It != EndIt; ++It) {
2144 unsigned ItVirtReg = (*It)->reg;
2145 if (VRM->hasPhys(ItVirtReg))
2147 unsigned ItPhysReg = VirtRegToPhysReg[ItVirtReg];
2148 Matrix->assign(**It, ItPhysReg);
// Recolor each evicted range in queue order, recursing one level
// deeper; fail fast when any of them cannot be placed.
2164 bool RAGreedy::tryRecoloringCandidates(PQueue &RecoloringQueue,
2166 SmallVirtRegSet &FixedRegisters,
2168 while (!RecoloringQueue.empty()) {
2170 DEBUG(
dbgs() <<
"Try to recolor: " << *LI <<
'\n');
2172 PhysReg = selectOrSplitImpl(*LI, NewVRegs, FixedRegisters, Depth + 1);
2173 if (PhysReg == ~0u || !PhysReg)
2176 <<
" succeeded with: " <<
PrintReg(PhysReg, TRI) <<
'\n');
2177 Matrix->assign(*LI, PhysReg);
2178 FixedRegisters.insert(LI->
reg);
// Top-level entry from the allocator driver: run the recoloring-aware
// implementation and translate cutoff hits into user-visible errors.
2187 unsigned RAGreedy::selectOrSplit(
LiveInterval &VirtReg,
2189 CutOffInfo = CO_None;
2190 LLVMContext &Ctx = MF->getFunction()->getContext();
2191 SmallVirtRegSet FixedRegisters;
2192 unsigned Reg = selectOrSplitImpl(VirtReg, NewVRegs, FixedRegisters);
2193 if (Reg == ~0U && (CutOffInfo != CO_None)) {
2194 uint8_t CutOffEncountered = CutOffInfo & (CO_Depth | CO_Interf);
2195 if (CutOffEncountered == CO_Depth)
2196 Ctx.
emitError(
"register allocation failed: maximum depth for recoloring "
2197 "reached. Use -fexhaustive-register-search to skip "
2199 else if (CutOffEncountered == CO_Interf)
2200 Ctx.
emitError(
"register allocation failed: maximum interference for "
2201 "recoloring reached. Use -fexhaustive-register-search "
2203 else if (CutOffEncountered == (CO_Depth | CO_Interf))
2204 Ctx.
emitError(
"register allocation failed: maximum interference and "
2205 "depth for recoloring reached. Use "
2206 "-fexhaustive-register-search to skip cutoffs");
// VirtReg was offered an unused callee-saved register: decide whether
// paying the first-use CSR cost is worth it, or force a cheaper choice
// via CostPerUseLimit / splitting first.
2217 unsigned RAGreedy::tryAssignCSRFirstTime(
LiveInterval &VirtReg,
2220 unsigned &CostPerUseLimit,
2222 if (getStage(VirtReg) == RS_Spill && VirtReg.
isSpillable()) {
// Spilling this range outright may be cheaper than the CSR cost.
2225 SA->analyze(&VirtReg);
2226 if (calcSpillCost() >= CSRCost)
// CostPerUseLimit == 1 forbids unused CSRs downstream (see tryEvict).
2231 CostPerUseLimit = 1;
2234 if (getStage(VirtReg) < RS_Split) {
// Prefer a region split over the first CSR use when one exists.
2237 SA->analyze(&VirtReg);
2238 unsigned NumCands = 0;
2240 unsigned BestCand = calculateRegionSplitCost(VirtReg, Order, BestCost,
2242 if (BestCand == NoCand)
2247 doRegionSplit(VirtReg, BestCand,
false, NewVRegs);
// Keep the broken-hints worklist free of intervals being deleted.
2253 void RAGreedy::aboutToRemoveInterval(
LiveInterval &LI) {
2255 SetOfBrokenHints.remove(&LI);
// Scale the CSR cost option by the function's entry frequency so it
// is comparable with block frequencies.
2258 void RAGreedy::initializeCSRCost() {
2263 if (!CSRCost.getFrequency())
2267 uint64_t ActualEntry = MBFI->getEntryFreq();
2272 uint64_t FixedEntry = 1 << 14;
2273 if (ActualEntry < FixedEntry)
2275 else if (ActualEntry <= UINT32_MAX)
2280 CSRCost = CSRCost.getFrequency() * (ActualEntry / FixedEntry);
// Collect the copy hints for Reg: every full copy connecting Reg to
// another (virtual or physical) register, with its block frequency.
2286 void RAGreedy::collectHintInfo(
unsigned Reg, HintsInfo &Out) {
2287 for (
const MachineInstr &Instr : MRI->reg_nodbg_instructions(Reg)) {
2288 if (!Instr.isFullCopy())
// Pick the copy operand that is not Reg; skip identity copies.
2291 unsigned OtherReg = Instr.getOperand(0).getReg();
2292 if (OtherReg == Reg) {
2293 OtherReg = Instr.getOperand(1).getReg();
2294 if (OtherReg == Reg)
// Virtual registers are mapped to their current assignment.
2298 unsigned OtherPhysReg = TargetRegisterInfo::isPhysicalRegister(OtherReg)
2300 : VRM->getPhys(OtherReg);
2302 Out.push_back(HintInfo(MBFI->getBlockFreq(Instr.getParent()), OtherReg,
// getBrokenHintFreq fragment: accumulate the frequency of hints that
// PhysReg does not satisfy.
2313 for (
const HintInfo &Info : List) {
2314 if (Info.PhysReg != PhysReg)
// Walk the copy-connected component of VirtReg and try to recolor its
// members onto PhysReg so that copies become identity copies.
2328 void RAGreedy::tryHintRecoloring(
LiveInterval &VirtReg) {
2335 unsigned Reg = VirtReg.
reg;
2336 unsigned PhysReg = VRM->getPhys(Reg);
2343 <<
PrintReg(PhysReg, TRI) <<
")\n");
// Physical registers terminate the walk.
2349 if (TargetRegisterInfo::isPhysicalRegister(Reg))
2352 assert(VRM->hasPhys(Reg) &&
"We have unallocated variable!!");
2357 unsigned CurrPhys = VRM->getPhys(Reg);
// Only move Reg when PhysReg is legal for its class and free of
// interference.
2360 if (CurrPhys != PhysReg && (!MRI->getRegClass(Reg)->contains(PhysReg) ||
2361 Matrix->checkInterference(LI, PhysReg)))
2365 <<
") is recolorable.\n");
2369 collectHintInfo(Reg, Info);
// Recolor only when the copies removed outweigh the copies created.
2372 if (CurrPhys != PhysReg) {
2373 DEBUG(
dbgs() <<
"Checking profitability:\n");
2374 BlockFrequency OldCopiesCost = getBrokenHintFreq(Info, CurrPhys);
2377 <<
"\nNew Cost: " << NewCopiesCost.
getFrequency() <<
'\n');
2378 if (OldCopiesCost < NewCopiesCost) {
2379 DEBUG(
dbgs() <<
"=> Not profitable.\n");
2388 Matrix->assign(LI, PhysReg);
// Push the copy-related registers and keep walking the component.
2392 for (
const HintInfo &
HI : Info) {
2396 }
while (!RecoloringCandidates.
empty());
// Run hint recoloring for every interval whose hint was broken.
2435 void RAGreedy::tryHintsRecoloring() {
2437 assert(TargetRegisterInfo::isVirtualRegister(LI->reg) &&
2438 "Recoloring is possible only for virtual registers");
// The interval may have been deleted or spilled in the meantime.
2441 if (!VRM->hasPhys(LI->reg))
2443 tryHintRecoloring(*LI);
// Main allocation driver for one virtual register: try direct
// assignment, then eviction, then splitting, then last-chance
// recoloring, and finally spill it.
2447 unsigned RAGreedy::selectOrSplitImpl(
LiveInterval &VirtReg,
2449 SmallVirtRegSet &FixedRegisters,
2451 unsigned CostPerUseLimit = ~0u;
2454 if (
unsigned PhysReg = tryAssign(VirtReg, Order, NewVRegs)) {
// A first-time callee-saved assignment may be deferred in favor of a
// cheaper alternative (see tryAssignCSRFirstTime).
2458 if (CSRCost.getFrequency() && isUnusedCalleeSavedReg(PhysReg) &&
2460 unsigned CSRReg = tryAssignCSRFirstTime(VirtReg, Order, PhysReg,
2461 CostPerUseLimit, NewVRegs);
2462 if (CSRReg || !NewVRegs.
empty())
2470 LiveRangeStage Stage = getStage(VirtReg);
2472 <<
" Cascade " << ExtraRegInfo[VirtReg.
reg].Cascade <<
'\n');
// Eviction is skipped for ranges queued specifically for splitting.
2477 if (Stage != RS_Split)
2478 if (
unsigned PhysReg =
2479 tryEvict(VirtReg, Order, NewVRegs, CostPerUseLimit)) {
// Record broken hints so tryHintsRecoloring can revisit them later.
2480 unsigned Hint = MRI->getSimpleHint(VirtReg.
reg);
2486 if (Hint && Hint != PhysReg)
2487 SetOfBrokenHints.insert(&VirtReg);
2491 assert(NewVRegs.
empty() &&
"Cannot append to existing NewVRegs");
// First failure: postpone splitting to the second round through the
// priority queue.
2496 if (Stage < RS_Split) {
2497 setStage(VirtReg, RS_Split);
2498 DEBUG(
dbgs() <<
"wait for second round\n");
2506 return tryLastChanceRecoloring(VirtReg, Order, NewVRegs, FixedRegisters,
2510 unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
2511 if (PhysReg || !NewVRegs.
empty())
// Nothing else worked: spill the range for good (RS_Done).
2516 LiveRangeEdit LRE(&VirtReg, NewVRegs, *MF, *LIS, VRM,
this);
2517 spiller().spill(LRE);
2518 setStage(NewVRegs.
begin(), NewVRegs.
end(), RS_Done);
2521 MF->verify(
this,
"After spilling");
2529 DEBUG(
dbgs() <<
"********** GREEDY REGISTER ALLOCATION **********\n"
2530 <<
"********** Function: " << mf.
getName() <<
'\n');
2534 TII = MF->getSubtarget().getInstrInfo();
2535 RCI.runOnMachineFunction(mf);
2538 MF->getSubtarget().enableRALocalReassignment(
2539 MF->getTarget().getOptLevel());
2542 MF->verify(
this,
"Before greedy register allocator");
2545 getAnalysis<LiveIntervals>(),
2546 getAnalysis<LiveRegMatrix>());
2547 Indexes = &getAnalysis<SlotIndexes>();
2548 MBFI = &getAnalysis<MachineBlockFrequencyInfo>();
2549 DomTree = &getAnalysis<MachineDominatorTree>();
2551 Loops = &getAnalysis<MachineLoopInfo>();
2552 Bundles = &getAnalysis<EdgeBundles>();
2553 SpillPlacer = &getAnalysis<SpillPlacement>();
2554 DebugVars = &getAnalysis<LiveDebugVariables>();
2556 initializeCSRCost();
2563 SE.reset(
new SplitEditor(*SA, *LIS, *VRM, *DomTree, *MBFI));
2564 ExtraRegInfo.clear();
2565 ExtraRegInfo.resize(MRI->getNumVirtRegs());
2567 IntfCache.init(MF,
Matrix->getLiveUnions(), Indexes, LIS, TRI);
2568 GlobalCand.resize(32);
2569 SetOfBrokenHints.clear();
2572 tryHintsRecoloring();
bool seenAllInterferences() const
ValuesClass< DataType > LLVM_END_WITH_NULL values(const char *Arg, DataType Val, const char *Desc,...)
void push_back(const T &Elt)
AnalysisUsage & addPreserved()
Add the specified Pass class to the set of analyses preserved by this pass.
static PassRegistry * getPassRegistry()
getPassRegistry - Access the global registry object, which is automatically initialized at applicatio...
void rewind()
Start over from the beginning.
STATISTIC(NumFunctions,"Total number of functions")
bool isValid() const
isValid - returns true if this iterator is not yet at the end.
SlotIndex getBoundaryIndex() const
Returns the boundary index for associated with this index.
int getInstrDistance(SlotIndex other) const
Return the scaled distance from this index to the given one, where all slots on the same instruction ...
int getNumber() const
getNumber - MachineBasicBlocks are uniquely numbered at the function level, unless they're not in a M...
SlotIndex getBaseIndex() const
Returns the base index for associated with this index.
const T & front() const
front - Get the first element.
LiveInterval - This class represents the liveness of a register, or stack slot.
void initializeLiveDebugVariablesPass(PassRegistry &)
ArrayRef< MCPhysReg > getOrder() const
Get the allocation order without reordered hints.
bool isSpillable() const
isSpillable - Can this interval be spilled?
SM_Speed - Overlap intervals to minimize the expected execution frequency of the inserted copies...
SlotIndex FirstDef
First non-phi valno->def, or SlotIndex().
MachineBlockFrequencyInfo pass uses BlockFrequencyInfoImpl implementation to estimate machine basic b...
unsigned next(unsigned Limit=0)
Return the next physical register in the allocation order, or 0.
unsigned getNumAllocatableRegs(const TargetRegisterClass *RC) const
getNumAllocatableRegs - Returns the number of actually allocatable registers in RC in the current fun...
uint64_t getFrequency() const
Returns the frequency as a fixpoint number scaled by the entry frequency.
void initializeMachineLoopInfoPass(PassRegistry &)
void calculateSpillWeightsAndHints(LiveIntervals &LIS, MachineFunction &MF, const MachineLoopInfo &MLI, const MachineBlockFrequencyInfo &MBFI, VirtRegAuxInfo::NormalizingFn norm=normalizeSpillWeight)
Compute spill weights and allocation hints for all virtual register live intervals.
Callback methods for LiveRangeEdit owners.
aarch64 collect AArch64 Collect Linker Optimization Hint(LOH)"
void initializeRegisterCoalescerPass(PassRegistry &)
This class represents the liveness of a register, stack slot, etc.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
AnalysisUsage & addRequired()
Query interferences between a single live virtual register and a live interval union.
const TargetSubtargetInfo & getSubtarget() const
getSubtarget - Return the subtarget for which this machine code is being compiled.
MachineFunctionPass - This class adapts the FunctionPass interface to allow convenient creation of pa...
const HexagonInstrInfo * TII
static cl::opt< unsigned > LastChanceRecoloringMaxDepth("lcr-max-depth", cl::Hidden, cl::desc("Last chance recoloring max depth"), cl::init(5))
ArrayRef< T > makeArrayRef(const T &OneElt)
Construct an ArrayRef from a single element.
T LLVM_ATTRIBUTE_UNUSED_RESULT pop_back_val()
NamedRegionTimer - This class is basically a combination of TimeRegion and Timer. ...
Reg
All possible values of the reg field in the ModR/M byte.
unsigned getSize() const
getSize - Returns the sum of sizes of all the LiveRange's.
RegAllocBase provides the register allocation driver and interface that can be extended to add intere...
BorderConstraint Exit
Constraint on block exit.
void assign(size_type NumElts, const T &Elt)
void emitError(unsigned LocCookie, const Twine &ErrorStr)
emitError - Emit an error message to the currently installed error handler with optional location inf...
PrintReg - Helper class for printing registers on a raw_ostream.
const SmallVectorImpl< LiveInterval * > & interferingVRegs() const
bool LLVM_ATTRIBUTE_UNUSED_RESULT empty() const
SplitEditor - Edit machine code and LiveIntervals for live range splitting.
RegisterRegAlloc class - Track the registration of register allocators.
BlockConstraint - Entry and exit constraints for a basic block.
size_t size() const
size - Get the array size.
SlotIndex LastInstr
Last instr accessing current reg.
unsigned get(unsigned idx) const
TargetInstrInfo - Interface to description of machine instruction set.
static cl::opt< unsigned > CSRFirstTimeCost("regalloc-csr-first-time-cost", cl::desc("Cost for first time use of callee-saved register."), cl::init(0), cl::Hidden)
SlotIndex last()
last - Return the ending index of the last interfering range in the current block.
void initializeMachineDominatorTreePass(PassRegistry &)
initializer< Ty > init(const Ty &Val)
const uint8_t AllocationPriority
Classes with a higher priority value are assigned first by register allocators using a greedy heurist...
void initializeSlotIndexesPass(PassRegistry &)
bool isValid() const
Returns true if this is a valid index.
* if(!EatIfPresent(lltok::kw_thread_local)) return false
ParseOptionalThreadLocal := /*empty.
This is an important class for using LLVM in a threaded context.
void initializeLiveStacksPass(PassRegistry &)
Cursor - The primary query interface for the block interference cache.
SM_Partition(Default) - Try to create the complement interval so it doesn't overlap any other interva...
SM_Size - Overlap intervals to minimize the number of inserted COPY instructions. ...
Represent the analysis usage information of a pass.
static unsigned getNumAllocatableRegsForConstraints(const MachineInstr *MI, unsigned Reg, const TargetRegisterClass *SuperRC, const TargetInstrInfo *TII, const TargetRegisterInfo *TRI, const RegisterClassInfo &RCI)
Get the number of allocatable registers that match the constraints of Reg on MI and that are also in ...
void initializeLiveIntervalsPass(PassRegistry &)
unsigned Number
Basic block number (from MBB::getNumber()).
void resize(typename StorageT::size_type s)
FunctionPass class - This class is used to implement most global optimizations.
static cl::opt< SplitEditor::ComplementSpillMode > SplitSpillMode("split-spill-mode", cl::Hidden, cl::desc("Spill mode for splitting live ranges"), cl::values(clEnumValN(SplitEditor::SM_Partition,"default","Default"), clEnumValN(SplitEditor::SM_Size,"size","Optimize for size"), clEnumValN(SplitEditor::SM_Speed,"speed","Optimize for speed"), clEnumValEnd), cl::init(SplitEditor::SM_Partition))
std::pair< NoneType, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
void append(in_iter in_start, in_iter in_end)
Add the specified range to the end of the SmallVector.
TargetRegisterInfo base class - We assume that the target defines a static array of TargetRegisterDes...
BorderConstraint Entry
Constraint on block entry.
static cl::opt< bool > EnableLocalReassignment("enable-local-reassign", cl::Hidden, cl::desc("Local reassignment can yield better allocation decisions, but ""may be compile time intensive"), cl::init(false))
const T & back() const
back - Get the last element.
iterator find(SlotIndex Pos)
find - Return an iterator pointing to the first segment that ends after Pos, or end().
static cl::opt< unsigned > LastChanceRecoloringMaxInterference("lcr-max-interf", cl::Hidden, cl::desc("Last chance recoloring maximum number of considered"" interference at a time"), cl::init(8))
A SetVector that performs no allocations if smaller than a certain size.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements...
SlotIndex FirstInstr
First instr accessing current reg.
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small...
bool test(unsigned Idx) const
Additional information about basic blocks where the current variable is live.
SplitAnalysis - Analyze a LiveInterval, looking for live range splitting opportunities.
void setPreservesCFG()
This function should be called by the pass, iff they do not:
bool hasInterference()
hasInterference - Return true if the current block has any interference.
void initializeSpillPlacementPass(PassRegistry &)
void initializeMachineSchedulerPass(PassRegistry &)
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
static RegisterRegAlloc greedyRegAlloc("greedy","greedy register allocator", createGreedyRegisterAllocator)
unsigned getNumRegs() const
getNumRegs - Return the number of registers in this class.
LiveSegments::iterator SegmentIter
bool LiveOut
Current reg is live out.
void moveToBlock(unsigned MBBNum)
moveTo - Move cursor to basic block MBBNum.
#define clEnumValN(ENUMVAL, FLAGNAME, DESC)
void initializeVirtRegMapPass(PassRegistry &)
Spiller * createInlineSpiller(MachineFunctionPass &pass, MachineFunction &mf, VirtRegMap &vrm)
Create and return a spiller that will insert spill code directly instead of deferring though VirtRegM...
SlotIndex beginIndex() const
beginIndex - Return the lowest numbered slot covered.
Representation of each machine instruction.
ArrayRef< unsigned > regs() const
void initializeLiveRegMatrixPass(PassRegistry &)
SlotIndex endIndex() const
endNumber - return the maximum point of the range of the whole, exclusive.
FunctionPass * createGreedyRegisterAllocator()
Greedy register allocation pass - This pass implements a global register allocator for optimized buil...
unsigned collectInterferingVRegs(unsigned MaxInterferingRegs=UINT_MAX)
const TargetRegisterClass * getRegClassConstraintEffectForVReg(unsigned Reg, const TargetRegisterClass *CurRC, const TargetInstrInfo *TII, const TargetRegisterInfo *TRI, bool ExploreBundle=false) const
Applies the constraints (def/use) implied by this MI on Reg to the given CurRC.
void verify(const MachineRegisterInfo *MRI=nullptr) const
Walks the interval and assert if any invariants fail to hold.
SlotIndex getRegSlot(bool EC=false) const
Returns the register use/def slot in the current instruction for a normal or early-clobber def...
static float normalizeSpillWeight(float UseDefFreq, unsigned Size, unsigned NumInstr)
Normalize the spill weight of a live interval.
bool operator<(int64_t V1, const APSInt &V2)
bool isHint() const
Return true if the last register returned from next() was a preferred register.
static cl::opt< bool > ExhaustiveSearch("exhaustive-register-search", cl::NotHidden, cl::desc("Exhaustive Search for registers bypassing the depth ""and interference cutoffs of last chance recoloring"))
void initializeEdgeBundlesPass(PassRegistry &)
virtual const TargetRegisterInfo * getRegisterInfo() const
getRegisterInfo - If register information is available, return it.
StringRef getName() const
getName - Return the name of the corresponding LLVM function.
bool TimePassesIsEnabled
If the user specifies the -time-passes argument on an LLVM tool command line then the value of this b...
bool LiveIn
Current reg is live in.
SlotIndex - An opaque wrapper around machine indexes.
DominatorTree Class - Concrete subclass of DominatorTreeBase that is used to compute a normal dominat...
SlotIndex first()
first - Return the starting index of the first interfering range in the current block.
bool ChangesValue
True when this block changes the value of the live range.