44 #define DEBUG_TYPE "regalloc"
46 STATISTIC(numJoins ,
"Number of interval joins performed");
47 STATISTIC(numCrossRCs ,
"Number of cross class joins performed");
48 STATISTIC(numCommutes ,
"Number of instruction commuting performed");
49 STATISTIC(numExtends ,
"Number of copies extended");
50 STATISTIC(NumReMats ,
"Number of instructions re-materialized");
51 STATISTIC(NumInflated ,
"Number of register classes inflated");
52 STATISTIC(NumLaneConflicts,
"Number of dead lane conflicts tested");
53 STATISTIC(NumLaneResolves,
"Number of dead lane conflicts resolved");
57 cl::desc(
"Coalesce copies (default=true)"),
72 cl::desc(
"Coalesce copies that span blocks (default=subtarget)"),
77 cl::desc(
"Verify machine instrs before and after register coalescing"),
103 bool JoinGlobalCopies;
124 void eliminateDeadDefs();
130 void coalesceLocals();
133 void joinAllIntervals();
204 void updateRegDefsUses(
unsigned SrcReg,
unsigned DstReg,
unsigned SubIdx);
239 if (LIS->shrinkToUses(LI,
Dead)) {
243 LIS->splitSeparateComponents(*LI, SplitLIs);
255 void releaseMemory()
override;
268 "Simple Register Coalescing",
false,
false)
276 char RegisterCoalescer::
ID = 0;
279 unsigned &Src,
unsigned &Dst,
280 unsigned &SrcSub,
unsigned &DstSub) {
282 Dst = MI->getOperand(0).getReg();
283 DstSub = MI->getOperand(0).getSubReg();
284 Src = MI->getOperand(1).getReg();
285 SrcSub = MI->getOperand(1).getSubReg();
286 }
else if (MI->isSubregToReg()) {
287 Dst = MI->getOperand(0).getReg();
288 DstSub = tri.composeSubRegIndices(MI->getOperand(0).getSubReg(),
289 MI->getOperand(3).getImm());
290 Src = MI->getOperand(2).getReg();
291 SrcSub = MI->getOperand(2).getSubReg();
306 for (
const auto &
MI : *MBB) {
307 if (!
MI.isCopyLike() && !
MI.isUnconditionalBranch())
317 Flipped = CrossClass =
false;
319 unsigned Src, Dst, SrcSub, DstSub;
320 if (!
isMoveInstr(TRI, MI, Src, Dst, SrcSub, DstSub))
322 Partial = SrcSub || DstSub;
339 if (!Dst)
return false;
346 if (!Dst)
return false;
356 if (SrcSub && DstSub) {
358 if (Src == Dst && SrcSub != DstSub)
384 if (DstIdx && !SrcIdx) {
390 CrossClass = NewRC != DstRC || NewRC != SrcRC;
395 "Cannot have a physical SubIdx");
413 unsigned Src, Dst, SrcSub, DstSub;
414 if (!
isMoveInstr(TRI, MI, Src, Dst, SrcSub, DstSub))
421 }
else if (Src != SrcReg) {
429 assert(!DstIdx && !SrcIdx &&
"Inconsistent CoalescerPair state.");
435 return DstReg == Dst;
437 return TRI.
getSubReg(DstReg, SrcSub) == Dst;
448 void RegisterCoalescer::getAnalysisUsage(
AnalysisUsage &AU)
const {
460 void RegisterCoalescer::eliminateDeadDefs() {
466 void RegisterCoalescer::LRE_WillEraseInstruction(
MachineInstr *
MI) {
468 ErasedInstrs.insert(MI);
474 assert(!CP.
isPhys() &&
"This doesn't work for physreg copies.");
499 if (BS == IntB.
end())
return false;
500 VNInfo *BValNo = BS->valno;
505 if (BValNo->
def != CopyIdx)
return false;
511 if (AS == IntA.end())
return false;
512 VNInfo *AValNo = AS->valno;
524 if (ValS == IntB.
end())
530 LIS->getInstructionFromIndex(ValS->end.getPrevSlot());
537 if (ValS+1 != BS)
return false;
541 SlotIndex FillerStart = ValS->end, FillerEnd = BS->start;
545 BValNo->
def = FillerStart;
550 IntB.
addSegment(LiveInterval::Segment(FillerStart, FillerEnd, BValNo));
553 if (BValNo != ValS->valno)
558 VNInfo *SubBValNo = S.getVNInfoAt(CopyIdx);
559 S.addSegment(LiveInterval::Segment(FillerStart, FillerEnd, SubBValNo));
561 if (SubBValNo != SubValSNo)
562 S.MergeValueNumberInto(SubBValNo, SubValSNo);
565 DEBUG(
dbgs() <<
" result = " << IntB <<
'\n');
578 if (AS->end == CopyIdx)
585 bool RegisterCoalescer::hasOtherReachingDefs(
LiveInterval &IntA,
591 if (LIS->hasPHIKill(IntA, AValNo))
595 if (ASeg.
valno != AValNo)
continue;
598 if (BI != IntB.
begin())
600 for (; BI != IntB.
end() && ASeg.
end >= BI->start; ++BI) {
601 if (BI->valno == BValNo)
603 if (BI->start <= ASeg.
start && BI->end > ASeg.
start)
605 if (BI->start > ASeg.
start && BI->start < ASeg.
end)
618 if (S.
valno != SrcValNo)
624 bool RegisterCoalescer::removeCopyByCommutingDef(
const CoalescerPair &CP,
657 assert(BValNo !=
nullptr && BValNo->
def == CopyIdx);
687 if (!
TII->findCommutedOpIndices(*DefMI, UseOpIdx, NewDstIdx))
691 unsigned NewReg = NewDstMO.
getReg();
697 if (hasOtherReachingDefs(IntA, IntB, AValNo, BValNo))
705 SlotIndex UseIdx = LIS->getInstructionIndex(*UseMI);
707 if (US == IntA.end() || US->valno != AValNo)
714 DEBUG(
dbgs() <<
"\tremoveCopyByCommutingDef: " << AValNo->
def <<
'\t'
721 TII->commuteInstruction(*DefMI,
false, UseOpIdx, NewDstIdx);
726 !
MRI->constrainRegClass(IntB.
reg,
MRI->getRegClass(IntA.reg)))
728 if (NewMI != DefMI) {
729 LIS->ReplaceMachineInstrInMaps(*DefMI, *NewMI);
761 assert(US != IntA.end() &&
"Use must be live");
762 if (US->valno != AValNo)
784 DEBUG(
dbgs() <<
"\t\tnoop: " << DefIdx <<
'\t' << *UseMI);
788 VNInfo *SubDVNI = S.getVNInfoAt(DefIdx);
791 VNInfo *SubBValNo = S.getVNInfoAt(CopyIdx);
793 S.MergeValueNumberInto(SubDVNI, SubBValNo);
796 ErasedInstrs.insert(UseMI);
797 LIS->RemoveMachineInstrFromMaps(*UseMI);
805 if (!IntA.hasSubRanges()) {
807 IntA.createSubRangeFrom(Allocator, Mask, IntA);
811 VNInfo *ASubValNo = SA.getVNInfoAt(AIdx);
812 assert(ASubValNo !=
nullptr);
833 SB.LaneMask = Common;
840 BSubValNo->
def = ASubValNo->
def;
853 BValNo->
def = AValNo->
def;
855 DEBUG(
dbgs() <<
"\t\textended: " << IntB <<
'\n');
857 LIS->removeVRegDefAt(IntA, AValNo->
def);
859 DEBUG(
dbgs() <<
"\t\ttrimmed: " << IntA <<
'\n');
868 "This code cannot handle physreg aliasing");
870 if (!
Op.isReg() || !
Op.isDef() ||
Op.getReg() !=
Reg)
874 if (
Op.getSubReg() == 0 ||
Op.isUndef())
880 bool RegisterCoalescer::reMaterializeTrivialDef(
const CoalescerPair &CP,
892 SlotIndex CopyIdx = LIS->getInstructionIndex(*CopyMI);
894 assert(ValNo &&
"CopyMI input register not live");
904 if (!
TII->isAsCheapAsAMove(*DefMI))
906 if (!
TII->isTriviallyReMaterializable(*DefMI, AA))
918 unsigned CopyDstReg = DstOperand.
getReg();
927 if (SrcIdx && DstIdx)
933 unsigned NewDstReg = DstReg;
935 unsigned NewDstIdx = TRI->composeSubRegIndices(CP.
getSrcIdx(),
938 NewDstReg = TRI->getSubReg(DstReg, NewDstIdx);
948 "Only expect to deal with virtual or physical registers");
956 TII->reMaterialize(*MBB, MII, DstReg, SrcIdx, *DefMI, *TRI);
970 &&
"Shouldn't have SrcIdx+DstIdx at this point");
973 TRI->getCommonSubClass(DefRC, DstRC);
974 if (CommonRC !=
nullptr) {
1000 LIS->ReplaceMachineInstrInMaps(*CopyMI, NewMI);
1002 ErasedInstrs.insert(CopyMI);
1022 if (DefRC !=
nullptr) {
1024 NewRC = TRI->getMatchingSuperRegClass(NewRC, DefRC, NewIdx);
1026 NewRC = TRI->getCommonSubClass(NewRC, DefRC);
1027 assert(NewRC &&
"subreg chosen for remat incompatible with instruction");
1032 SR.LaneMask = TRI->composeSubRegIndexLaneMask(DstIdx, SR.LaneMask);
1034 MRI->setRegClass(DstReg, NewRC);
1037 updateRegDefsUses(DstReg, DstReg, DstIdx);
1054 SlotIndex CurrIdx = LIS->getInstructionIndex(NewMI);
1060 if (!SR.liveAt(DefIndex))
1061 SR.createDeadDef(DefIndex, Alloc);
1062 MaxMask &= ~SR.LaneMask;
1064 if (MaxMask.
any()) {
1073 "Only expect virtual or physical registers in remat");
1076 CopyDstReg,
true ,
true ,
false ));
1093 SlotIndex NewMIIdx = LIS->getInstructionIndex(NewMI);
1096 if (
LiveRange *LR = LIS->getCachedRegUnit(*Units))
1097 LR->createDeadDef(NewMIIdx.
getRegSlot(), LIS->getVNInfoAllocator());
1107 SlotIndex NewMIIdx = LIS->getInstructionIndex(NewMI);
1108 for (
unsigned i = 0, e = NewMIImplDefs.
size();
i != e; ++
i) {
1109 unsigned Reg = NewMIImplDefs[
i];
1111 if (
LiveRange *LR = LIS->getCachedRegUnit(*Units))
1112 LR->createDeadDef(NewMIIdx.
getRegSlot(), LIS->getVNInfoAllocator());
1119 shrinkToUses(&SrcInt, &DeadDefs);
1120 if (!DeadDefs.empty()) {
1127 DEBUG(
dbgs() <<
"\t\tupdated: " << *UseMI);
1130 eliminateDeadDefs();
1136 bool RegisterCoalescer::eliminateUndefCopy(
MachineInstr *CopyMI) {
1148 unsigned SrcReg, DstReg, SrcSubIdx, DstSubIdx;
1149 isMoveInstr(*TRI, CopyMI, SrcReg, DstReg, SrcSubIdx, DstSubIdx);
1151 SlotIndex Idx = LIS->getInstructionIndex(*CopyMI);
1155 LaneBitmask SrcMask = TRI->getSubRegIndexLaneMask(SrcSubIdx);
1157 if ((SR.LaneMask & SrcMask).none())
1162 }
else if (SrcLI.
liveAt(Idx))
1165 DEBUG(
dbgs() <<
"\tEliminating copy of <undef> value\n");
1176 LaneBitmask DstMask = TRI->getSubRegIndexLaneMask(DstSubIdx);
1178 if ((SR.LaneMask & DstMask).none())
1181 VNInfo *SVNI = SR.getVNInfoAt(RegIndex);
1183 SR.removeValNo(SVNI);
1187 LIS->removeVRegDefAt(DstLI, RegIndex);
1194 SlotIndex UseIdx = LIS->getInstructionIndex(MI);
1200 if ((SR.LaneMask & UseMask).none())
1202 if (SR.liveAt(UseIdx)) {
1208 isLive = DstLI.
liveAt(UseIdx);
1212 DEBUG(
dbgs() <<
"\tnew undef: " << UseIdx <<
'\t' << MI);
1223 LIS->shrinkToUses(&DstLI);
1230 LaneBitmask Mask = TRI->getSubRegIndexLaneMask(SubRegIdx);
1233 bool IsUndef =
true;
1235 if ((S.LaneMask & Mask).none())
1237 if (S.liveAt(UseIdx)) {
1250 ShrinkMainRange =
true;
1254 void RegisterCoalescer::updateRegDefsUses(
unsigned SrcReg,
1258 LiveInterval *DstInt = DstIsPhys ?
nullptr : &LIS->getInterval(DstReg);
1260 if (DstInt && DstInt->
hasSubRanges() && DstReg != SrcReg) {
1263 if (SubReg == 0 || MO.
isUndef())
1269 addUndefFlag(*DstInt, UseIdx, MO, SubReg);
1275 I =
MRI->reg_instr_begin(SrcReg),
E =
MRI->reg_instr_end();
1284 if (SrcReg == DstReg && !Visited.
insert(UseMI).second)
1293 if (DstInt && !Reads && SubIdx)
1294 Reads = DstInt->
liveAt(LIS->getInstructionIndex(*UseMI));
1297 for (
unsigned i = 0, e = Ops.
size();
i != e; ++
i) {
1303 if (SubIdx && MO.
isDef())
1308 if (SubIdx != 0 && MO.
isUse() &&
MRI->shouldTrackSubRegLiveness(DstReg)) {
1315 ? LIS->getSlotIndexes()->getIndexBefore(*UseMI)
1316 : LIS->getInstructionIndex(*UseMI);
1318 addUndefFlag(*DstInt, UseIdx, MO, SubIdx);
1328 dbgs() <<
"\t\tupdated: ";
1330 dbgs() << LIS->getInstructionIndex(*UseMI) <<
"\t";
1336 bool RegisterCoalescer::canJoinPhys(
const CoalescerPair &CP) {
1341 DEBUG(
dbgs() <<
"\tCan only merge into reserved registers.\n");
1349 DEBUG(
dbgs() <<
"\tCannot join complex intervals into reserved register.\n");
1353 bool RegisterCoalescer::joinCopy(
MachineInstr *CopyMI,
bool &Again) {
1356 DEBUG(
dbgs() << LIS->getInstructionIndex(*CopyMI) <<
'\t' << *CopyMI);
1360 DEBUG(
dbgs() <<
"\tNot coalescable.\n");
1373 if (!TRI->shouldCoalesce(CopyMI, SrcRC, SrcIdx, DstRC, DstIdx,
1375 DEBUG(
dbgs() <<
"\tSubtarget bailed on coalescing.\n");
1385 DeadDefs.push_back(CopyMI);
1386 eliminateDeadDefs();
1391 if (!CP.
isPhys() && eliminateUndefCopy(CopyMI)) {
1392 LIS->RemoveMachineInstrFromMaps(*CopyMI);
1402 DEBUG(
dbgs() <<
"\tCopy already coalesced: " << LI <<
'\n');
1403 const SlotIndex CopyIdx = LIS->getInstructionIndex(*CopyMI);
1407 assert(ReadVNI &&
"No value before copy and no <undef> flag.");
1408 assert(ReadVNI != DefVNI &&
"Cannot read and define the same value.");
1416 S.MergeValueNumberInto(SDefVNI, SReadVNI);
1419 DEBUG(
dbgs() <<
"\tMerged values: " << LI <<
'\n');
1421 LIS->RemoveMachineInstrFromMaps(*CopyMI);
1431 if (!canJoinPhys(CP)) {
1435 if (reMaterializeTrivialDef(CP, CopyMI, IsDefCopy))
1444 LIS->getInterval(CP.
getDstReg()).size())
1448 dbgs() <<
"\tConsidering merging to "
1449 << TRI->getRegClassName(CP.
getNewRC()) <<
" with ";
1452 << TRI->getSubRegIndexName(CP.
getDstIdx()) <<
" and "
1454 << TRI->getSubRegIndexName(CP.
getSrcIdx()) <<
'\n';
1462 ShrinkMainRange =
false;
1468 if (!joinIntervals(CP)) {
1474 if (reMaterializeTrivialDef(CP, CopyMI, IsDefCopy))
1480 if (adjustCopiesBackFrom(CP, CopyMI) ||
1481 removeCopyByCommutingDef(CP, CopyMI)) {
1482 LIS->RemoveMachineInstrFromMaps(*CopyMI);
1504 if (!CP.
isPhys() && RegClassInfo.isProperSubClass(CP.
getNewRC()))
1510 ErasedInstrs.erase(CopyMI);
1519 if (ShrinkMask.any()) {
1522 if ((S.LaneMask & ShrinkMask).none())
1526 LIS->shrinkToUses(S, LI.
reg);
1530 if (ShrinkMainRange) {
1545 dbgs() <<
"\tResult = ";
1557 bool RegisterCoalescer::joinReservedPhysReg(
CoalescerPair &CP) {
1561 assert(
MRI->isReserved(DstReg) &&
"Not a reserved register");
1563 DEBUG(
dbgs() <<
"\t\tRHS = " << RHS <<
'\n');
1565 assert(RHS.containsOneValue() &&
"Invalid join with reserved register");
1574 if (!
MRI->isConstantPhysReg(DstReg)) {
1578 if (!
MRI->isReserved(*RI))
1581 if (RHS.overlaps(LIS->getRegUnit(*UI))) {
1603 CopyMI =
MRI->getVRegDef(SrcReg);
1612 if (!
MRI->hasOneNonDBGUse(SrcReg)) {
1613 DEBUG(
dbgs() <<
"\t\tMultiple vreg uses!\n");
1617 if (!LIS->intervalIsInOneMBB(RHS)) {
1618 DEBUG(
dbgs() <<
"\t\tComplex control flow!\n");
1623 CopyMI = &*
MRI->use_instr_nodbg_begin(SrcReg);
1627 if (!
MRI->isConstantPhysReg(DstReg)) {
1636 DEBUG(
dbgs() <<
"\t\tInterference (read): " << *MI);
1641 for (
const auto &MO : MI->
operands()) {
1643 DEBUG(
dbgs() <<
"\t\tInterference (regmask clobber): " << *MI);
1653 <<
" at " << CopyRegIdx <<
"\n");
1655 LIS->removePhysRegDefAt(DstReg, CopyRegIdx);
1663 LIS->RemoveMachineInstrFromMaps(*CopyMI);
1750 const unsigned SubIdx;
1757 const bool SubRangeJoin;
1759 const bool TrackSubRegLiveness;
1774 enum ConflictResolution {
1805 ConflictResolution Resolution;
1831 bool ErasableImplicitDef;
1838 bool PrunedComputed;
1840 Val() : Resolution(CR_Keep), WriteLanes(), ValidLanes(),
1841 RedefVNI(nullptr), OtherVNI(nullptr), ErasableImplicitDef(
false),
1844 bool isAnalyzed()
const {
return WriteLanes.any(); }
1856 std::pair<const VNInfo*,unsigned> followCopyChain(
const VNInfo *VNI)
const;
1858 bool valuesIdentical(
VNInfo *Val0,
VNInfo *Val1,
const JoinVals &Other)
const;
1867 ConflictResolution analyzeValue(
unsigned ValNo, JoinVals &Other);
1872 void computeAssignment(
unsigned ValNo, JoinVals &Other);
1889 bool taintExtent(
unsigned,
LaneBitmask, JoinVals&,
1902 bool isPrunedValue(
unsigned ValNo, JoinVals &Other);
1908 bool TrackSubRegLiveness)
1909 : LR(LR), Reg(Reg), SubIdx(SubIdx), LaneMask(LaneMask),
1910 SubRangeJoin(SubRangeJoin), TrackSubRegLiveness(TrackSubRegLiveness),
1911 NewVNInfo(newVNInfo), CP(cp), LIS(lis), Indexes(LIS->getSlotIndexes()),
1912 TRI(TRI), Assignments(LR.getNumValNums(), -1),
Vals(LR.getNumValNums())
1917 bool mapValues(JoinVals &Other);
1921 bool resolveConflicts(JoinVals &Other);
1941 void pruneMainSegments(
LiveInterval &LI,
bool &ShrinkMainRange);
1952 void removeImplicitDefs();
1955 const int *getAssignments()
const {
return Assignments.data(); }
1965 L |= TRI->getSubRegIndexLaneMask(
1966 TRI->composeSubRegIndices(SubIdx, MO.
getSubReg()));
1973 std::pair<const VNInfo*, unsigned> JoinVals::followCopyChain(
1974 const VNInfo *VNI)
const {
1975 unsigned Reg = this->
Reg;
1980 assert(MI &&
"No defining instruction");
1982 return std::make_pair(VNI, Reg);
1985 return std::make_pair(VNI, Reg);
1998 LaneBitmask SMask = TRI->composeSubRegIndexLaneMask(SubIdx, S.LaneMask);
1999 if ((SMask & LaneMask).none())
2006 if (ValueIn ==
nullptr)
2011 return std::make_pair(VNI, Reg);
2014 bool JoinVals::valuesIdentical(
VNInfo *Value0,
VNInfo *Value1,
2015 const JoinVals &Other)
const {
2018 std::tie(Orig0, Reg0) = followCopyChain(Value0);
2019 if (Orig0 == Value1)
2024 std::tie(Orig1, Reg1) = Other.followCopyChain(Value1);
2030 return Orig0->
def == Orig1->
def && Reg0 == Reg1;
2033 JoinVals::ConflictResolution
2034 JoinVals::analyzeValue(
unsigned ValNo, JoinVals &Other) {
2035 Val &V =
Vals[ValNo];
2036 assert(!V.isAnalyzed() &&
"Value has already been analyzed!");
2038 if (VNI->isUnused()) {
2045 if (VNI->isPHIDef()) {
2048 : TRI->getSubRegIndexLaneMask(SubIdx);
2049 V.ValidLanes = V.WriteLanes = Lanes;
2052 assert(DefMI !=
nullptr);
2058 V.ErasableImplicitDef =
true;
2062 V.ValidLanes = V.WriteLanes = computeWriteLanes(DefMI, Redef);
2081 assert((TrackSubRegLiveness || V.RedefVNI) &&
2082 "Instruction is reading nonexistent value");
2083 if (V.RedefVNI !=
nullptr) {
2084 computeAssignment(V.RedefVNI->id, Other);
2085 V.ValidLanes |=
Vals[V.RedefVNI->id].ValidLanes;
2094 V.ErasableImplicitDef =
true;
2095 V.ValidLanes &= ~V.WriteLanes;
2112 if (OtherVNI->def < VNI->def)
2113 Other.computeAssignment(OtherVNI->id, *
this);
2114 else if (VNI->def < OtherVNI->def && OtherLRQ.
valueIn()) {
2117 V.OtherVNI = OtherLRQ.
valueIn();
2118 return CR_Impossible;
2120 V.OtherVNI = OtherVNI;
2121 Val &OtherV = Other.Vals[OtherVNI->id];
2123 if (!OtherV.isAnalyzed())
2128 if (VNI->isPHIDef())
2130 if ((V.ValidLanes & OtherV.ValidLanes).any())
2132 return CR_Impossible;
2138 V.OtherVNI = OtherLRQ.
valueIn();
2147 Other.computeAssignment(V.OtherVNI->id, *
this);
2148 Val &OtherV = Other.Vals[V.OtherVNI->id];
2157 if (OtherV.ErasableImplicitDef && DefMI &&
2159 DEBUG(
dbgs() <<
"IMPLICIT_DEF defined at " << V.OtherVNI->def
2161 <<
", keeping it.\n");
2162 OtherV.ErasableImplicitDef =
false;
2167 if (VNI->isPHIDef())
2174 if (TrackSubRegLiveness
2175 && (V.WriteLanes & (OtherV.ValidLanes | OtherV.WriteLanes)).none())
2185 V.ValidLanes &= ~V.WriteLanes | OtherV.ValidLanes;
2200 && valuesIdentical(VNI, V.OtherVNI, Other))
2215 if ((V.WriteLanes & OtherV.ValidLanes).none())
2227 assert(VNI->def.isEarlyClobber() &&
2228 "Only early clobber defs can overlap a kill");
2229 return CR_Impossible;
2236 if ((TRI->getSubRegIndexLaneMask(Other.SubIdx) & ~V.WriteLanes).none())
2237 return CR_Impossible;
2244 return CR_Impossible;
2253 return CR_Unresolved;
2256 void JoinVals::computeAssignment(
unsigned ValNo, JoinVals &Other) {
2257 Val &V =
Vals[ValNo];
2258 if (V.isAnalyzed()) {
2261 assert(Assignments[ValNo] != -1 &&
"Bad recursion?");
2264 switch ((V.Resolution = analyzeValue(ValNo, Other))) {
2268 assert(V.OtherVNI &&
"OtherVNI not assigned, can't merge.");
2269 assert(Other.Vals[V.OtherVNI->id].isAnalyzed() &&
"Missing recursion");
2270 Assignments[ValNo] = Other.Assignments[V.OtherVNI->id];
2273 <<
PrintReg(Other.Reg) <<
':' << V.OtherVNI->id <<
'@'
2274 << V.OtherVNI->def <<
" --> @"
2275 << NewVNInfo[Assignments[ValNo]]->def <<
'\n');
2278 case CR_Unresolved: {
2280 assert(V.OtherVNI &&
"OtherVNI not assigned, can't prune");
2281 Val &OtherV = Other.Vals[V.OtherVNI->id];
2284 if ((OtherV.WriteLanes & ~V.ValidLanes).any() && TrackSubRegLiveness)
2285 OtherV.ErasableImplicitDef =
false;
2286 OtherV.Pruned =
true;
2291 Assignments[ValNo] = NewVNInfo.size();
2297 bool JoinVals::mapValues(JoinVals &Other) {
2299 computeAssignment(i, Other);
2300 if (
Vals[i].Resolution == CR_Impossible) {
2310 taintExtent(
unsigned ValNo,
LaneBitmask TaintedLanes, JoinVals &Other,
2318 assert(OtherI != Other.LR.end() &&
"No conflict?");
2323 if (End >= MBBEnd) {
2325 << OtherI->valno->id <<
'@' << OtherI->start <<
'\n');
2329 << OtherI->valno->id <<
'@' << OtherI->start
2330 <<
" to " << End <<
'\n');
2334 TaintExtent.push_back(std::make_pair(End, TaintedLanes));
2337 if (++OtherI == Other.LR.end() || OtherI->start >= MBBEnd)
2341 const Val &OV = Other.Vals[OtherI->valno->id];
2342 TaintedLanes &= ~OV.WriteLanes;
2345 }
while (TaintedLanes.
any());
2349 bool JoinVals::usesLanes(
const MachineInstr &MI,
unsigned Reg,
unsigned SubIdx,
2358 unsigned S = TRI->composeSubRegIndices(SubIdx, MO.
getSubReg());
2359 if ((Lanes & TRI->getSubRegIndexLaneMask(S)).any())
2365 bool JoinVals::resolveConflicts(JoinVals &Other) {
2368 assert (V.Resolution != CR_Impossible &&
"Unresolvable conflict");
2369 if (V.Resolution != CR_Unresolved)
2377 assert(V.OtherVNI &&
"Inconsistent conflict resolution.");
2379 const Val &OtherV = Other.Vals[V.OtherVNI->id];
2384 LaneBitmask TaintedLanes = V.WriteLanes & OtherV.ValidLanes;
2386 if (!taintExtent(i, TaintedLanes, Other, TaintExtent))
2390 assert(!TaintExtent.
empty() &&
"There should be at least one conflict.");
2401 "Interference ends on VNI->def. Should have been handled earlier");
2404 assert(LastMI &&
"Range must end at a proper instruction");
2405 unsigned TaintNum = 0;
2407 assert(MI != MBB->end() &&
"Bad LastMI");
2408 if (usesLanes(*MI, Other.Reg, Other.SubIdx, TaintedLanes)) {
2409 DEBUG(
dbgs() <<
"\t\ttainted lanes used by: " << *MI);
2413 if (&*MI == LastMI) {
2414 if (++TaintNum == TaintExtent.
size())
2417 assert(LastMI &&
"Range must end at a proper instruction");
2418 TaintedLanes = TaintExtent[TaintNum].second;
2424 V.Resolution = CR_Replace;
2430 bool JoinVals::isPrunedValue(
unsigned ValNo, JoinVals &Other) {
2431 Val &V =
Vals[ValNo];
2432 if (V.Pruned || V.PrunedComputed)
2435 if (V.Resolution != CR_Erase && V.Resolution != CR_Merge)
2440 V.PrunedComputed =
true;
2441 V.Pruned = Other.isPrunedValue(V.OtherVNI->id, *
this);
2445 void JoinVals::pruneValues(JoinVals &Other,
2447 bool changeInstrs) {
2450 switch (
Vals[i].Resolution) {
2455 LIS->pruneValue(Other.LR, Def, &EndPoints);
2460 Val &OtherV = Other.Vals[
Vals[
i].OtherVNI->id];
2461 bool EraseImpDef = OtherV.ErasableImplicitDef &&
2462 OtherV.Resolution == CR_Keep;
2483 <<
": " << Other.LR <<
'\n');
2488 if (isPrunedValue(i, Other)) {
2493 LIS->pruneValue(LR, Def, &EndPoints);
2495 << Def <<
": " << LR <<
'\n');
2507 bool DidPrune =
false;
2509 if (
Vals[i].Resolution != CR_Erase)
2520 if (ValueOut !=
nullptr && Q.
valueIn() ==
nullptr) {
2522 <<
" at " << Def <<
"\n");
2523 LIS->pruneValue(S, Def,
nullptr);
2533 <<
" at " << Def <<
"\n");
2534 ShrinkMask |= S.LaneMask;
2545 if (
VNInfo *VNI = SR.Query(Def).valueOutOrDead())
2546 if (VNI->
def == Def)
2552 void JoinVals::pruneMainSegments(
LiveInterval &LI,
bool &ShrinkMainRange) {
2553 assert(&static_cast<LiveRange&>(LI) == &LR);
2556 if (
Vals[i].Resolution != CR_Keep)
2561 Vals[
i].Pruned =
true;
2562 ShrinkMainRange =
true;
2566 void JoinVals::removeImplicitDefs() {
2569 if (V.Resolution != CR_Keep || !V.ErasableImplicitDef || !V.Pruned)
2584 switch (
Vals[i].Resolution) {
2589 if (!
Vals[i].ErasableImplicitDef || !
Vals[i].Pruned)
2603 if (LI !=
nullptr) {
2618 assert(static_cast<LiveRange*>(LI) == &LR);
2630 LE = LE.
isValid() ? std::max(LE, I->end) : I->
end;
2641 if (S != LR.
begin())
2642 std::prev(S)->end = NewEnd;
2646 dbgs() <<
"\t\tremoved " << i <<
'@' << Def <<
": " << LR <<
'\n';
2648 dbgs() <<
"\t\t LHS = " << *LI <<
'\n';
2655 assert(MI &&
"No instruction to erase");
2663 DEBUG(
dbgs() <<
"\t\terased:\t" << Def <<
'\t' << *MI);
2664 LIS->RemoveMachineInstrFromMaps(*MI);
2679 NewVNInfo,
CP, LIS, TRI,
true,
true);
2680 JoinVals LHSVals(LRange, CP.getDstReg(), CP.getDstIdx(), LaneMask,
2681 NewVNInfo,
CP, LIS, TRI,
true,
true);
2688 if (!LHSVals.mapValues(RHSVals) || !RHSVals.mapValues(LHSVals)) {
2693 if (!LHSVals.resolveConflicts(RHSVals) ||
2694 !RHSVals.resolveConflicts(LHSVals)) {
2705 LHSVals.pruneValues(RHSVals, EndPoints,
false);
2706 RHSVals.pruneValues(LHSVals, EndPoints,
false);
2708 LHSVals.removeImplicitDefs();
2709 RHSVals.removeImplicitDefs();
2715 LRange.
join(RRange, LHSVals.getAssignments(), RHSVals.getAssignments(),
2718 DEBUG(
dbgs() <<
"\t\tjoined lanes: " << LRange <<
"\n");
2719 if (EndPoints.
empty())
2725 dbgs() <<
"\t\trestoring liveness to " << EndPoints.
size() <<
" points: ";
2726 for (
unsigned i = 0, n = EndPoints.
size(); i != n; ++
i) {
2727 dbgs() << EndPoints[
i];
2731 dbgs() <<
": " << LRange <<
'\n';
2733 LIS->extendToIndices(LRange, EndPoints);
2736 void RegisterCoalescer::mergeSubRangeInto(
LiveInterval &LI,
2765 LiveRange RangeCopy(ToMerge, Allocator);
2766 joinSubRegRanges(*CommonRange, RangeCopy, Common, CP);
2770 if (LaneMask.any()) {
2780 bool TrackSubRegLiveness =
MRI->shouldTrackSubRegLiveness(*CP.
getNewRC());
2782 NewVNInfo,
CP, LIS, TRI,
false, TrackSubRegLiveness);
2784 NewVNInfo,
CP, LIS, TRI,
false, TrackSubRegLiveness);
2787 <<
"\n\t\tLHS = " << LHS
2792 if (!LHSVals.mapValues(RHSVals) || !RHSVals.mapValues(LHSVals))
2796 if (!LHSVals.resolveConflicts(RHSVals) || !RHSVals.resolveConflicts(LHSVals))
2805 unsigned DstIdx = CP.getDstIdx();
2807 LaneBitmask Mask = DstIdx == 0 ? CP.getNewRC()->getLaneMask()
2808 : TRI->getSubRegIndexLaneMask(DstIdx);
2812 }
else if (DstIdx != 0) {
2815 LaneBitmask Mask = TRI->composeSubRegIndexLaneMask(DstIdx, R.LaneMask);
2820 <<
' ' << LHS <<
'\n');
2823 unsigned SrcIdx = CP.getSrcIdx();
2825 LaneBitmask Mask = SrcIdx == 0 ? CP.getNewRC()->getLaneMask()
2826 : TRI->getSubRegIndexLaneMask(SrcIdx);
2827 mergeSubRangeInto(LHS, RHS, Mask, CP);
2831 LaneBitmask Mask = TRI->composeSubRegIndexLaneMask(SrcIdx, R.LaneMask);
2832 mergeSubRangeInto(LHS, R, Mask, CP);
2835 DEBUG(
dbgs() <<
"\tJoined SubRanges " << LHS <<
"\n");
2839 LHSVals.pruneMainSegments(LHS, ShrinkMainRange);
2841 LHSVals.pruneSubRegValues(LHS, ShrinkMask);
2842 RHSVals.pruneSubRegValues(LHS, ShrinkMask);
2850 LHSVals.pruneValues(RHSVals, EndPoints,
true);
2851 RHSVals.pruneValues(LHSVals, EndPoints,
true);
2856 LHSVals.eraseInstrs(ErasedInstrs, ShrinkRegs, &LHS);
2857 RHSVals.eraseInstrs(ErasedInstrs, ShrinkRegs);
2858 while (!ShrinkRegs.
empty())
2859 shrinkToUses(&LIS->getInterval(ShrinkRegs.
pop_back_val()));
2862 LHS.
join(RHS, LHSVals.getAssignments(), RHSVals.getAssignments(), NewVNInfo);
2867 MRI->clearKillFlags(LHS.
reg);
2868 MRI->clearKillFlags(RHS.
reg);
2870 if (!EndPoints.
empty()) {
2874 dbgs() <<
"\t\trestoring liveness to " << EndPoints.
size() <<
" points: ";
2875 for (
unsigned i = 0, n = EndPoints.
size(); i != n; ++
i) {
2876 dbgs() << EndPoints[
i];
2880 dbgs() <<
": " << LHS <<
'\n';
2882 LIS->extendToIndices((
LiveRange&)LHS, EndPoints);
2889 return CP.
isPhys() ? joinReservedPhysReg(CP) : joinVirtRegs(CP);
2894 struct MBBPriorityInfo {
2900 : MBB(mbb),
Depth(depth), IsSplit(issplit) {}
2909 const MBBPriorityInfo *RHS) {
2911 if (LHS->Depth != RHS->Depth)
2912 return LHS->Depth > RHS->Depth ? -1 : 1;
2915 if (LHS->IsSplit != RHS->IsSplit)
2916 return LHS->IsSplit ? -1 : 1;
2920 unsigned cl = LHS->MBB->pred_size() + LHS->MBB->succ_size();
2921 unsigned cr = RHS->MBB->pred_size() + RHS->MBB->succ_size();
2923 return cl > cr ? -1 : 1;
2926 return LHS->MBB->getNumber() < RHS->MBB->getNumber() ? -1 : 1;
2947 bool RegisterCoalescer::
2949 bool Progress =
false;
2950 for (
unsigned i = 0, e = CurrList.
size(); i != e; ++
i) {
2955 if (ErasedInstrs.
erase(CurrList[i])) {
2956 CurrList[
i] =
nullptr;
2960 bool Success = joinCopy(CurrList[i], Again);
2962 if (Success || !Again)
2963 CurrList[
i] =
nullptr;
2980 bool RegisterCoalescer::applyTerminalRule(
const MachineInstr &Copy)
const {
2984 unsigned DstReg, DstSubReg, SrcReg, SrcSubReg;
2985 isMoveInstr(*TRI, &Copy, SrcReg, DstReg, SrcSubReg, DstSubReg);
3008 unsigned OtherReg, OtherSubReg, OtherSrcReg, OtherSrcSubReg;
3009 isMoveInstr(*TRI, &Copy, OtherSrcReg, OtherReg, OtherSrcSubReg,
3011 if (OtherReg == SrcReg)
3012 OtherReg = OtherSrcReg;
3018 if (LIS->getInterval(OtherReg).overlaps(DstLI)) {
3032 const unsigned PrevSize = WorkList.size();
3033 if (JoinGlobalCopies) {
3042 if (!MII->isCopyLike())
3044 bool ApplyTerminalRule = applyTerminalRule(*MII);
3046 if (ApplyTerminalRule)
3049 LocalWorkList.push_back(&(*MII));
3051 if (ApplyTerminalRule)
3054 WorkList.push_back(&(*MII));
3058 LocalWorkList.append(LocalTerminals.
begin(), LocalTerminals.
end());
3059 WorkList.append(GlobalTerminals.
begin(), GlobalTerminals.
end());
3064 if (MII.isCopyLike()) {
3065 if (applyTerminalRule(MII))
3068 WorkList.push_back(&MII);
3071 WorkList.append(Terminals.
begin(), Terminals.
end());
3077 CurrList(WorkList.begin() + PrevSize, WorkList.end());
3078 if (copyCoalesceWorkList(CurrList))
3079 WorkList.erase(
std::remove(WorkList.begin() + PrevSize, WorkList.end(),
3083 void RegisterCoalescer::coalesceLocals() {
3084 copyCoalesceWorkList(LocalWorkList);
3085 for (
unsigned j = 0, je = LocalWorkList.size(); j != je; ++j) {
3086 if (LocalWorkList[j])
3087 WorkList.push_back(LocalWorkList[j]);
3089 LocalWorkList.clear();
3092 void RegisterCoalescer::joinAllIntervals() {
3093 DEBUG(
dbgs() <<
"********** JOINING INTERVALS ***********\n");
3094 assert(WorkList.empty() && LocalWorkList.empty() &&
"Old data still around.");
3096 std::vector<MBBPriorityInfo> MBBs;
3097 MBBs.reserve(MF->size());
3100 MBBs.push_back(MBBPriorityInfo(MBB,
Loops->getLoopDepth(MBB),
3106 unsigned CurrDepth = UINT_MAX;
3107 for (
unsigned i = 0, e = MBBs.size(); i != e; ++
i) {
3109 if (JoinGlobalCopies && MBBs[i].
Depth < CurrDepth) {
3111 CurrDepth = MBBs[
i].Depth;
3113 copyCoalesceInMBB(MBBs[i].MBB);
3119 while (copyCoalesceWorkList(WorkList))
3123 void RegisterCoalescer::releaseMemory() {
3124 ErasedInstrs.
clear();
3127 InflateRegs.clear();
3137 LIS = &getAnalysis<LiveIntervals>();
3138 AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
3139 Loops = &getAnalysis<MachineLoopInfo>();
3150 DEBUG(
dbgs() <<
"********** SIMPLE REGISTER COALESCING **********\n"
3151 <<
"********** Function: " << MF->getName() <<
'\n');
3154 MF->verify(
this,
"Before register coalescing");
3156 RegClassInfo.runOnMachineFunction(fn);
3166 InflateRegs.erase(std::unique(InflateRegs.begin(), InflateRegs.end()),
3168 DEBUG(
dbgs() <<
"Trying to inflate " << InflateRegs.size() <<
" regs.\n");
3169 for (
unsigned i = 0, e = InflateRegs.size(); i != e; ++
i) {
3170 unsigned Reg = InflateRegs[
i];
3171 if (
MRI->reg_nodbg_empty(Reg))
3173 if (
MRI->recomputeRegClass(Reg)) {
3175 << TRI->getRegClassName(
MRI->getRegClass(Reg)) <<
'\n');
3182 if (!
MRI->shouldTrackSubRegLiveness(Reg)) {
3190 assert((S.LaneMask & ~MaxMask).none());
3200 MF->verify(
this,
"After register coalescing");
unsigned succ_size() const
static bool isSplitEdge(const MachineBasicBlock *MBB)
Return true if this block should be vacated by the coalescer to eliminate branches.
void push_back(const T &Elt)
const MachineFunction * getParent() const
Return the MachineFunction containing this basic block.
const_iterator end(StringRef path)
Get end iterator over path.
bool isValid() const
Check if the iterator is at the end of the list.
instr_iterator erase(instr_iterator I)
Remove an instruction from the instruction list and delete it.
AnalysisUsage & addPreserved()
Add the specified Pass class to the set of analyses preserved by this pass.
MachineInstr * getParent()
getParent - Return the instruction that this operand belongs to.
static PassRegistry * getPassRegistry()
getPassRegistry - Access the global registry object, which is automatically initialized at applicatio...
Segments::iterator iterator
SlotIndex def
The index of the defining instruction.
unsigned getDstReg() const
Return the register (virtual or physical) that will remain after coalescing.
STATISTIC(NumFunctions,"Total number of functions")
bool isValid() const
isValid - returns true if this iterator is not yet at the end.
int getNumber() const
MachineBasicBlocks are uniquely numbered at the function level, unless they're not in a MachineFuncti...
A Module instance is used to store all the information related to an LLVM module. ...
INITIALIZE_PASS_BEGIN(RegisterCoalescer,"simple-register-coalescing","Simple Register Coalescing", false, false) INITIALIZE_PASS_END(RegisterCoalescer
static cl::opt< bool > VerifyCoalescing("verify-coalescing", cl::desc("Verify machine instrs before and after register coalescing"), cl::Hidden)
unsigned getNumDefs() const
Return the number of MachineOperands that are register definitions.
std::error_code remove(const Twine &path, bool IgnoreNonExisting=true)
Remove path.
static LaneBitmask getAll()
char & MachineDominatorsID
MachineDominators - This pass is a machine dominators analysis pass.
LiveInterval - This class represents the liveness of a register, or stack slot.
Describe properties that are true of each instruction in the target description file.
void setIsUndef(bool Val=true)
static bool isVirtualRegister(unsigned Reg)
Return true if the specified register number is in the virtual register namespace.
static bool isMoveInstr(const TargetRegisterInfo &tri, const MachineInstr *MI, unsigned &Src, unsigned &Dst, unsigned &SrcSub, unsigned &DstSub)
static bool isLocalCopy(MachineInstr *Copy, const LiveIntervals *LIS)
static bool definesFullReg(const MachineInstr &MI, unsigned Reg)
Returns true if MI defines the full vreg Reg, as opposed to just defining a subregister.
char & RegisterCoalescerID
RegisterCoalescer - This pass merges live ranges to eliminate copies.
bool isKill() const
Return true if the live-in value is killed by this instruction.
const MCInstrDesc & getDesc() const
Returns the target instruction descriptor of this MachineInstr.
A live range for subregisters.
This represents a simple continuous liveness interval for a value.
void setIsDead(bool Val=true)
SlotIndex endPoint() const
Return the end point of the last live range segment to interact with the instruction, if any.
void markUnused()
Mark this value as unused.
void reserve(size_type N)
SubRange * createSubRangeFrom(BumpPtrAllocator &Allocator, LaneBitmask LaneMask, const LiveRange &CopyFrom)
Like createSubRange() but the new range is filled with a copy of the liveness information in CopyFrom...
VNInfo - Value Number Information.
iterator_range< mop_iterator > operands()
void substVirtReg(unsigned Reg, unsigned SubIdx, const TargetRegisterInfo &)
substVirtReg - Substitute the current register with the virtual subregister Reg:SubReg.
unsigned getNumValNums() const
VNInfo * getVNInfoAt(SlotIndex Idx) const
getVNInfoAt - Return the VNInfo that is live at Idx, or NULL.
Callback methods for LiveRangeEdit owners.
void initializeRegisterCoalescerPass(PassRegistry &)
bool allDefsAreDead() const
Return true if all the defs of this instruction are dead.
This class represents the liveness of a register, stack slot, etc.
static bool isDefInSubRange(LiveInterval &LI, SlotIndex Def)
Check if any of the subranges of LI contain a definition at Def.
A templated base class for SmallPtrSet which provides the typesafe interface that is common across al...
void removeEmptySubRanges()
Removes all subranges without any segments (subranges without segments are not considered valid and s...
AnalysisUsage & addRequired()
#define INITIALIZE_PASS_DEPENDENCY(depName)
bool isCrossClass() const
Return true if DstReg is virtual and NewRC is a smaller register class than DstReg's.
MachineBasicBlock * intervalIsInOneMBB(const LiveInterval &LI) const
intervalIsInOneMBB - If LI is confined to a single basic block, return a pointer to that block...
const TargetSubtargetInfo & getSubtarget() const
getSubtarget - Return the subtarget for which this machine code is being compiled.
MachineFunctionPass - This class adapts the FunctionPass interface to allow convenient creation of pa...
const HexagonInstrInfo * TII
static MachineOperand CreateReg(unsigned Reg, bool isDef, bool isImp=false, bool isKill=false, bool isDead=false, bool isUndef=false, bool isEarlyClobber=false, unsigned SubReg=0, bool isDebug=false, bool isInternalRead=false)
bool isFlipped() const
Return true when getSrcReg is the register being defined by the original copy instruction.
A helper class for register coalescers.
std::pair< bool, bool > readsWritesVirtualRegister(unsigned Reg, SmallVectorImpl< unsigned > *Ops=nullptr) const
Return a pair of bools (reads, writes) indicating if this instruction reads or writes Reg...
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
bool isReg() const
isReg - Tests if this is a MO_Register operand.
iterator_range< subrange_iterator > subranges()
void eraseFromParent()
Unlink 'this' from the containing basic block and delete it.
Result of a LiveRange query.
static cl::opt< bool > EnableJoinSplits("join-splitedges", cl::desc("Coalesce copies on split edges (default=subtarget)"), cl::Hidden)
Temporary flag to test critical edge unsplitting.
const TargetRegisterClass * getRegClass(unsigned Reg) const
Return the register class of the specified virtual register.
Reg
All possible values of the reg field in the ModR/M byte.
iterator_range< reg_instr_nodbg_iterator > reg_nodbg_instructions(unsigned Reg) const
bool isBlock() const
isBlock - Returns true if this is a block boundary slot.
constexpr bool any() const
LLVM_NODISCARD bool empty() const
unsigned getNumOperands() const
Access to explicit operands of the instruction.
bool isUnused() const
Returns true if this value is unused.
SlotIndex getNextNonNullIndex(SlotIndex Index)
Returns the next non-null index, if one exists.
defusechain_iterator - This class provides iterator support for machine operands in the function that...
virtual bool enableJoinGlobalCopies() const
True if the subtarget should enable joining global copies.
bool isDead() const
isDead - Returns true if this is a dead def kill slot.
iterator addSegment(Segment S)
Add the specified Segment to this range, merging segments as appropriate.
MCRegUnitRootIterator enumerates the root registers of a register unit.
VNInfo * MergeValueNumberInto(VNInfo *V1, VNInfo *V2)
MergeValueNumberInto - This method is called when two value numbers are found to be equivalent...
unsigned getMatchingSuperReg(unsigned Reg, unsigned SubIdx, const TargetRegisterClass *RC) const
Return a super-register of the specified register Reg so its sub-register of index SubIdx is Reg...
VNInfo * valueOutOrDead() const
Returns the value alive at the end of the instruction, if any.
bool isCopyLike() const
Return true if the instruction behaves like a copy.
virtual const TargetRegisterClass * getMatchingSuperRegClass(const TargetRegisterClass *A, const TargetRegisterClass *B, unsigned Idx) const
Return a subclass of the specified register class A so that each register in it has a sub-register of...
AnalysisUsage & addPreservedID(const void *ID)
Printable PrintReg(unsigned Reg, const TargetRegisterInfo *TRI=nullptr, unsigned SubRegIdx=0)
Prints virtual and physical registers with or without a TRI instance.
size_t size() const
size - Get the array size.
bool setRegisters(const MachineInstr *)
Set registers to match the copy instruction MI.
Maximum length of the test input libFuzzer tries to guess a good value based on the corpus and reports it always prefer smaller inputs during the corpus shuffle When libFuzzer itself reports a bug this exit code will be used If indicates the maximal total time in seconds to run the fuzzer minimizes the provided crash input Use with etc Experimental Use value profile to guide fuzzing Number of simultaneous worker processes to run the jobs If min(jobs, NumberOfCpuCores()/2)\" is used.") FUZZER_FLAG_INT(reload
static GCRegistry::Add< CoreCLRGC > E("coreclr","CoreCLR-compatible GC")
SlotIndex getPrevSlot() const
Returns the previous slot in the index list.
const MachineBasicBlock * getParent() const
TargetInstrInfo - Interface to description of machine instruction set.
bool isDebugValue() const
bool isImplicitDef() const
INITIALIZE_PASS(HexagonEarlyIfConversion,"hexagon-eif","Hexagon early if conversion", false, false) bool HexagonEarlyIfConversion MachineBasicBlock * SB
const TargetRegisterClass * getCommonSuperRegClass(const TargetRegisterClass *RCA, unsigned SubA, const TargetRegisterClass *RCB, unsigned SubB, unsigned &PreA, unsigned &PreB) const
Find a common super-register class if it exists.
bool isEarlyClobber() const
static const unsigned CommuteAnyOperandIndex
void removeValNo(VNInfo *ValNo)
removeValNo - Remove all the segments defined by the specified value#.
const TargetRegisterClass * getNewRC() const
Return the register class of the coalesced register.
initializer< Ty > init(const Ty &Val)
void array_pod_sort(IteratorTy Start, IteratorTy End)
array_pod_sort - This sorts an array with the specified start and end extent.
LiveQueryResult Query(SlotIndex Idx) const
Query Liveness at Idx.
bool isValid() const
Returns true if this is a valid index.
void dump(const SparseBitVector< ElementSize > &LHS, raw_ostream &out)
SubRange * createSubRange(BumpPtrAllocator &Allocator, LaneBitmask LaneMask)
Creates a new empty subregister live range.
unsigned const MachineRegisterInfo * MRI
MutableArrayRef - Represent a mutable reference to an array (0 or more elements consecutively in memo...
constexpr bool none() const
static int compareMBBPriority(const MBBPriorityInfo *LHS, const MBBPriorityInfo *RHS)
C-style comparator that sorts first based on the loop depth of the basic block (the unsigned)...
void getAnalysisUsage(AnalysisUsage &AU) const override
getAnalysisUsage - Subclasses that override getAnalysisUsage must call this.
Allocate memory in an ever growing pool, as if by bump-pointer.
MachineInstrBuilder & UseMI
static void addSegmentsWithValNo(LiveRange &Dst, VNInfo *DstValNo, const LiveRange &Src, const VNInfo *SrcValNo)
Copy segments with value number SrcValNo from live range Src to live range Dst, and use value number DstValNo in Dst.
LLVM_ATTRIBUTE_ALWAYS_INLINE iterator begin()
int findRegisterDefOperandIdx(unsigned Reg, bool isDead=false, bool Overlap=false, const TargetRegisterInfo *TRI=nullptr) const
Returns the operand index that is a def of the specified register or -1 if it is not found...
const MachineOperand & getOperand(unsigned i) const
unsigned getSubReg(unsigned Reg, unsigned Idx) const
Returns the physical register number of sub-register "Index" for physical register RegNo...
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
bool isRegTiedToUseOperand(unsigned DefOpIdx, unsigned *UseOpIdx=nullptr) const
Given the index of a register def operand, check if the register def is tied to a source operand...
void eliminateDeadDefs(SmallVectorImpl< MachineInstr * > &Dead, ArrayRef< unsigned > RegsBeingSpilled=None, AliasAnalysis *AA=nullptr)
eliminateDeadDefs - Try to delete machine instructions that are now dead (allDefsAreDead returns true...
Represent the analysis usage information of a pass.
Greedy Register Allocator
INITIALIZE_PASS_END(RegBankSelect, DEBUG_TYPE,"Assign register bank of generic virtual registers", false, false) RegBankSelect
static const unsigned End
void substPhysReg(unsigned Reg, const TargetRegisterInfo &)
substPhysReg - Substitute the current register with the physical register Reg, taking any existing Su...
VNInfo * valueDefined() const
Return the value defined by this instruction, if any.
static bool isTerminalReg(unsigned DstReg, const MachineInstr &Copy, const MachineRegisterInfo *MRI)
Check if DstReg is a terminal node.
void verify() const
Walk the range and assert if any invariants fail to hold.
unsigned getSubReg() const
TargetRegisterInfo base class - We assume that the target defines a static array of TargetRegisterDes...
bool liveAt(SlotIndex index) const
bool isPHIDef() const
Returns true if this value is defined by a PHI instruction (or was, PHI instructions may have been el...
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
MachineInstr * getInstructionFromIndex(SlotIndex index) const
Returns the instruction for the given index, or null if the given index has no instruction associated...
static cl::opt< cl::boolOrDefault > EnableGlobalCopies("join-globalcopies", cl::desc("Coalesce copies that span blocks (default=subtarget)"), cl::init(cl::BOU_UNSET), cl::Hidden)
Temporary flag to test global copy optimization.
void setIsKill(bool Val=true)
bool isRegMask() const
isRegMask - Tests if this is a MO_RegisterMask operand.
static cl::opt< bool > EnableJoining("join-liveintervals", cl::desc("Coalesce copies (default=true)"), cl::init(true))
iterator find(SlotIndex Pos)
find - Return an iterator pointing to the first segment that ends after Pos, or end().
VNInfo * valueOut() const
Return the value leaving the instruction, if any.
bool isSafeToMove(AliasAnalysis *AA, bool &SawStore) const
Return true if it is safe to move this instruction.
bool readsRegister(unsigned Reg, const TargetRegisterInfo *TRI=nullptr) const
Return true if the MachineInstr reads the specified register.
int findRegisterUseOperandIdx(unsigned Reg, bool isKill=false, const TargetRegisterInfo *TRI=nullptr) const
Returns the operand index that is a use of the specific register or -1 if it is not found...
Iterator for intrusive lists based on ilist_node.
Printable PrintRegUnit(unsigned Unit, const TargetRegisterInfo *TRI)
Create Printable object to print register units on a raw_ostream.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements...
bool isPhys() const
Return true if DstReg is a physical register.
void substituteRegister(unsigned FromReg, unsigned ToReg, unsigned SubIdx, const TargetRegisterInfo &RegInfo)
Replace all occurrences of FromReg with ToReg:SubIdx, properly composing subreg indices where necessa...
void addOperand(MachineFunction &MF, const MachineOperand &Op)
Add the specified operand to the instruction.
bool erase(PtrType Ptr)
erase - If the set contains the specified pointer, remove it and return true, otherwise return false...
static bool isSameInstr(SlotIndex A, SlotIndex B)
isSameInstr - Return true if A and B refer to the same instruction.
simple register Simple Register false
simple register coalescing
MachineOperand class - Representation of each machine instruction operand.
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small...
MachineBasicBlock * getMBBFromIndex(SlotIndex index) const
Returns the basic block which the given index falls in.
static LaneBitmask getNone()
Promote Memory to Register
LLVM_NODISCARD T pop_back_val()
void setPreservesCFG()
This function should be called by the pass, iff they do not:
LiveInterval & getInterval(unsigned Reg)
bool isCoalescable(const MachineInstr *) const
Return true if MI is a copy instruction that will become an identity copy after coalescing.
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
void clearSubRanges()
Removes all subregister liveness information.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
SlotIndex getMBBEndIdx(unsigned Num) const
Returns the last index in the given basic block number.
static bool clobbersPhysReg(const uint32_t *RegMask, unsigned PhysReg)
clobbersPhysReg - Returns true if this RegMask clobbers PhysReg.
StringRef getName() const
Return the name of the corresponding LLVM basic block, or "(null)".
void setDebugLoc(DebugLoc dl)
Replace current source information with new such.
unsigned getDstIdx() const
Return the subregister index that DstReg will be coalesced into, or 0.
const DebugLoc & getDebugLoc() const
Returns the debug location id of this MachineInstr.
VNInfo * getValNumInfo(unsigned ValNo)
getValNumInfo - Returns pointer to the specified val#.
bool containsOneValue() const
MachineRegisterInfo - Keep track of information for virtual and physical registers, including vreg register classes, use/def chains for registers, etc.
TargetSubtargetInfo - Generic base class for all target subtargets.
unsigned getSrcIdx() const
Return the subregister index that SrcReg will be coalesced into, or 0.
Representation of each machine instruction.
static bool isPhysicalRegister(unsigned Reg)
Return true if the specified register number is in the physical register namespace.
LLVM_ATTRIBUTE_ALWAYS_INLINE iterator end()
static cl::opt< bool > UseTerminalRule("terminal-rule", cl::desc("Apply the terminal rule"), cl::init(false), cl::Hidden)
MachineRegisterInfo & getRegInfo()
getRegInfo - Return information about the registers currently in use.
VNInfo * createDeadDef(SlotIndex Def, VNInfo::Allocator &VNInfoAllocator)
createDeadDef - Make sure the range has a value defined at Def.
void setReg(unsigned Reg)
Change the register this operand corresponds to.
LLVM_ATTRIBUTE_ALWAYS_INLINE size_type size() const
void setSubReg(unsigned subReg)
const TargetRegisterClass * getCommonSubClass(const TargetRegisterClass *A, const TargetRegisterClass *B, const MVT::SimpleValueType SVT=MVT::SimpleValueType::Any) const
Find the largest common subclass of A and B.
constexpr bool all() const
const TargetMachine & getTarget() const
getTarget - Return the target machine this machine code is compiled with
instr_iterator insert(instr_iterator I, MachineInstr *M)
Insert MI into the instruction list before I, possibly inside a bundle.
bool flip()
Swap SrcReg and DstReg.
SlotIndex getRegSlot(bool EC=false) const
Returns the register use/def slot in the current instruction for a normal or early-clobber def...
unsigned getReg() const
getReg - Returns the register number.
VNInfo * getNextValue(SlotIndex def, VNInfo::Allocator &VNInfoAllocator)
getNextValue - Create a new value number and return it.
bool isCommutable(QueryType Type=IgnoreBundle) const
Return true if this may be a 2- or 3-address instruction (of the form "X = op Y, Z, ..."), which produces the same result if Y and Z are exchanged.
bool isPartial() const
Return true if the original copy instruction did not copy the full register, but was a subreg operati...
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
virtual const TargetInstrInfo * getInstrInfo() const
static LLVM_ATTRIBUTE_UNUSED Printable PrintLaneMask(LaneBitmask LaneMask)
Create Printable object to print LaneBitmasks on a raw_ostream.
simple register Simple Register Coalescing
#define LLVM_FALLTHROUGH
LLVM_FALLTHROUGH - Mark fallthrough cases in switch statements.
unsigned getSrcReg() const
Return the virtual register that will be coalesced away.
unsigned getNumOperands() const
Return the number of declared MachineOperands for this MachineInstruction.
std::underlying_type< E >::type Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
This class implements an extremely fast bulk output stream that can only output to a stream...
Primary interface to the complete machine description for the target machine.
bool isRegTiedToDefOperand(unsigned UseOpIdx, unsigned *DefOpIdx=nullptr) const
Return true if the use operand of the specified index is tied to a def operand.
virtual const TargetRegisterInfo * getRegisterInfo() const
getRegisterInfo - If register information is available, return it.
unsigned composeSubRegIndices(unsigned a, unsigned b) const
Return the subregister index you get from composing two subregister indices.
void join(LiveRange &Other, const int *ValNoAssignments, const int *RHSValNoAssignments, SmallVectorImpl< VNInfo * > &NewVNInfo)
join - Join two live ranges (this, and other) together.
iterator FindSegmentContaining(SlotIndex Idx)
Return an iterator to the segment that contains the specified index, or end() if there is none...
bool readsReg() const
readsReg - Returns true if this operand reads the previous value of its register. ...
VNInfo * valueIn() const
Return the value that is live-in to the instruction.
A wrapper pass to provide the legacy pass manager access to a suitably prepared AAResults object...
bool hasSubRanges() const
Returns true if subregister liveness information is available.
SlotIndex - An opaque wrapper around machine indexes.
reg_begin/reg_end - Provide iteration support to walk over all definitions and uses of a register wit...
unsigned pred_size() const
bool contains(unsigned Reg) const
Return true if the specified register is included in this register class.