#define DEBUG_TYPE "regalloc"

STATISTIC(NumSpilledRanges,   "Number of spilled live ranges");
STATISTIC(NumSnippets,        "Number of spilled snippets");
STATISTIC(NumSpills,          "Number of spills inserted");
STATISTIC(NumSpillsRemoved,   "Number of spills removed");
STATISTIC(NumReloads,         "Number of reloads inserted");
STATISTIC(NumReloadsRemoved,  "Number of reloads removed");
STATISTIC(NumFolded,          "Number of folded stack accesses");
STATISTIC(NumFoldedLoads,     "Number of folded loads");
STATISTIC(NumRemats,          "Number of rematerialized defs for spilling");
STATISTIC(NumOmitReloadSpill, "Number of omitted spills of reloads");
STATISTIC(NumHoists,          "Number of hoisted spills");
static cl::opt<bool>
DisableHoisting("disable-spill-hoist", cl::Hidden,
                cl::desc("Disable inline spill hoisting"));
class InlineSpiller : public Spiller {

    bool AllDefsAreReloads;

    SibValueInfo(unsigned Reg, VNInfo *VNI)
      : AllDefsAreReloads(true), DefByOrigPHI(false), KillsSource(false),
        SpillReg(Reg), SpillVNI(VNI), SpillMBB(nullptr), DefMI(nullptr) {}

    bool hasDef() const { return DefByOrigPHI || DefMI; }

  SibValueMap SibValues;

  ~InlineSpiller() override {}
        MFI(*mf.getFrameInfo()), MRI(mf.getRegInfo()),
        TII(*mf.getSubtarget().getInstrInfo()),
        TRI(*mf.getSubtarget().getRegisterInfo()),

  void collectRegsToSpill();

  bool isRegToSpill(unsigned Reg) {
    return std::find(RegsToSpill.begin(),
                     RegsToSpill.end(), Reg) != RegsToSpill.end();
  }

  bool isSibling(unsigned Reg);
  void propagateSiblingValue(SibValueMap::iterator, VNInfo *VNI = nullptr);
  void analyzeSiblingValues();

  void reMaterializeAll();

  bool foldMemoryOperand(ArrayRef<std::pair<MachineInstr*, unsigned> >,
                         MachineInstr *LoadMI = nullptr);

  void spillAroundUses(unsigned Reg);
void Spiller::anchor() { }

Spiller *llvm::createInlineSpiller(MachineFunctionPass &pass,
                                   MachineFunction &mf,
                                   VirtRegMap &vrm) {
  return new InlineSpiller(pass, mf, vrm);
}
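// isSnippet - Decide whether SnipLI is a tiny live range (at most two values,
// confined to a single basic block) that is connected to the register being
// spilled only through copies, so it can be spilled along with it.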
bool InlineSpiller::isSnippet(const LiveInterval &SnipLI) {
  unsigned Reg = Edit->getReg();

  if (SnipLI.getNumValNums() > 2 || !LIS.intervalIsInOneMBB(SnipLI))

       RI = MRI.reg_instr_nodbg_begin(SnipLI.reg),
       E = MRI.reg_instr_nodbg_end(); RI != E; ) {

    if (UseMI && MI != UseMI)
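// collectRegsToSpill - Start from Edit->getReg() and transitively add every
// sibling register whose live range qualifies as a snippet, recording the
// connecting copy instructions in SnippetCopies.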
void InlineSpiller::collectRegsToSpill() {
  unsigned Reg = Edit->getReg();

  RegsToSpill.assign(1, Reg);
  SnippetCopies.clear();

       RI = MRI.reg_instr_begin(Reg), E = MRI.reg_instr_end(); RI != E; ) {

    if (!isSibling(SnipReg))

    if (!isSnippet(SnipLI))

    SnippetCopies.insert(MI);
    if (isRegToSpill(SnipReg))

    RegsToSpill.push_back(SnipReg);
    DEBUG(dbgs() << "\talso spill snippet " << SnipLI << '\n');
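// isSibling - A sibling is another virtual register that was split off the
// same original register as the one being spilled.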
bool InlineSpiller::isSibling(unsigned Reg) {
  return TargetRegisterInfo::isVirtualRegister(Reg) &&
         VRM.getOriginal(Reg) == Original;
}
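// Debug printer for SibValueInfo entries.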
static raw_ostream &operator<<(raw_ostream &OS,
                               const InlineSpiller::SibValueInfo &SVI) {
  OS << "spill " << PrintReg(SVI.SpillReg) << ':'
     << SVI.SpillVNI->id << '@' << SVI.SpillVNI->def;
    OS << " in BB#" << SVI.SpillMBB->getNumber();
  if (SVI.AllDefsAreReloads)
    OS << " all-reloads";
  if (SVI.DefByOrigPHI)

    for (unsigned i = 0, e = SVI.Deps.size(); i != e; ++i)
      OS << ' ' << SVI.Deps[i]->id << '@' << SVI.Deps[i]->def;

    OS << " def: " << *SVI.DefMI;
void InlineSpiller::propagateSiblingValue(SibValueMap::iterator SVIIter,
                                          VNInfo *VNI) {
  SibValueMap::value_type *SVI = &*SVIIter;

    SVI->second.Deps.push_back(VNI);

    if (!SVI->second.hasDef())

    SibValueInfo &SV = SVI->second;

      SV.SpillMBB = LIS.getMBBFromIndex(SV.SpillVNI->def);

                 << SVI->first->id << '@' << SVI->first->def << ":\t" << SV);

    assert(SV.hasDef() && "Propagating undefined value");

    unsigned SpillDepth = ~0u;

         DepE = Deps->end(); DepI != DepE; ++DepI) {
      SibValueMap::iterator DepSVI = SibValues.find(*DepI);
      assert(DepSVI != SibValues.end() && "Dependent value not in SibValues");
      SibValueInfo &DepSV = DepSVI->second;

        DepSV.SpillMBB = LIS.getMBBFromIndex(DepSV.SpillVNI->def);

      bool Changed = false;

      if (!DepSV.hasDef()) {

        DepSV.DefMI = SV.DefMI;
        DepSV.DefByOrigPHI = SV.DefByOrigPHI;

      if (!SV.AllDefsAreReloads && DepSV.AllDefsAreReloads) {

        DepSV.AllDefsAreReloads = false;

      if (PropSpill && SV.SpillVNI != DepSV.SpillVNI) {
        if (SV.SpillMBB == DepSV.SpillMBB) {

          if (DepSV.KillsSource && SV.SpillVNI->def < DepSV.SpillVNI->def) {

            DepSV.SpillReg = SV.SpillReg;
            DepSV.SpillVNI = SV.SpillVNI;
            DepSV.SpillMBB = SV.SpillMBB;

          if (SpillDepth == ~0u)
            SpillDepth = Loops.getLoopDepth(SV.SpillMBB);

          bool HoistCondition =
            (MBFI.getBlockFreq(DepSV.SpillMBB) >=
             (MBFI.getBlockFreq(SV.SpillMBB) * MarginProb)) ||

          if ((Loops.getLoopDepth(DepSV.SpillMBB) > SpillDepth) &&

              (!DepSVI->first->isPHIDef() ||
               MDT.dominates(SV.SpillMBB, DepSV.SpillMBB))) {

            DepSV.SpillReg = SV.SpillReg;
            DepSV.SpillVNI = SV.SpillVNI;
            DepSV.SpillMBB = SV.SpillMBB;

        WorkList.insert(&*DepSVI);

      DEBUG(dbgs() << " update " << DepSVI->first->id << '@'
                   << DepSVI->first->def << " to:\t" << DepSV);
  } while (!WorkList.empty());
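// traceSiblingValue - Trace the value UseVNI in UseReg back through sibling
// copies to the instructions that actually define it, filling in SibValues
// along the way and returning the defining instruction, if any.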
  SibValueMap::iterator SVI;
  std::tie(SVI, Inserted) =
      SibValues.insert(std::make_pair(UseVNI, SibValueInfo(UseReg, UseVNI)));

                 << UseVNI->id << '@' << UseVNI->def << ' ' << SVI->second);
    return SVI->second.DefMI;

               << UseVNI->id << '@' << UseVNI->def << '\n');

  WorkList.push_back(std::make_pair(UseReg, UseVNI));

    SVI = SibValues.find(VNI);
    assert(SVI != SibValues.end() && "Missing SibValues entry");

      SVI->second.DefByOrigPHI = true;
      SVI->second.AllDefsAreReloads = false;
      propagateSiblingValue(SVI);

                   << " phi-defs, and " << NonPHIs.size()
                   << " non-phi/orig defs\n");

      for (unsigned i = 0, e = PHIs.size(); i != e; ++i)
        SibValues.insert(std::make_pair(PHIs[i], SibValueInfo(Reg, PHIs[i])));

      for (unsigned i = 0, e = NonPHIs.size(); i != e; ++i) {
        VNInfo *NonPHI = NonPHIs[i];

        std::tie(SVI, Inserted) =
            SibValues.insert(std::make_pair(NonPHI, SibValueInfo(Reg, NonPHI)));

          SVI->second.Deps.insert(SVI->second.Deps.end(), PHIs.begin(),
                                  PHIs.end());

          WorkList.push_back(std::make_pair(Reg, NonPHI));

      propagateSiblingValue(SVI);

    assert(MI && "Missing def");

    if (isSibling(SrcReg)) {

        assert(SrcQ.valueIn() && "Copy from non-existing value");

        SVI->second.KillsSource = SrcQ.isKill();

                     << SrcVNI->id << '@' << SrcVNI->def
                     << " kill=" << unsigned(SVI->second.KillsSource) << '\n');

      std::tie(SVI, Inserted) = SibValues.insert(
          std::make_pair(SrcVNI, SibValueInfo(SrcReg, SrcVNI)));

        WorkList.push_back(std::make_pair(SrcReg, SrcVNI));
      propagateSiblingValue(SVI, VNI);

    SVI->second.DefMI = MI;

    propagateSiblingValue(SVI);

    SVI->second.AllDefsAreReloads = false;
    propagateSiblingValue(SVI);
  } while (!WorkList.empty());

  SVI = SibValues.find(UseVNI);
  assert(SVI != SibValues.end() && "Didn't compute requested info");
  DEBUG(dbgs() << " traced to:\t" << SVI->second);
  return SVI->second.DefMI;
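// analyzeSiblingValues - For every value of every register to be spilled,
// trace it back through sibling copies and remember values that may be
// rematerialized instead of reloaded.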
void InlineSpiller::analyzeSiblingValues() {

  if (Edit->getReg() == Original)

  for (unsigned i = 0, e = RegsToSpill.size(); i != e; ++i) {
    unsigned Reg = RegsToSpill[i];

    for (LiveInterval::const_vni_iterator VI = LI.vni_begin(),

        DefMI = LIS.getInstructionFromIndex(VNI->def);
        assert(DefMI && "No defining instruction");

      assert(OrigVNI && "Def outside original live range");
      if (OrigVNI->def != VNI->def)
        DefMI = traceSiblingValue(Reg, VNI, OrigVNI);

      if (DefMI && Edit->checkRematerializable(VNI, DefMI, AA)) {
                     << VNI->def << " may remat from " << *DefMI);
  SlotIndex Idx = LIS.getInstructionIndex(CopyMI);

  assert(VNI && VNI->def == Idx.getRegSlot() && "Not defined by copy");

  SibValueMap::iterator I = SibValues.find(VNI);
  if (I == SibValues.end())

  const SibValueInfo &SVI = I->second;

  if (!SVI.AllDefsAreReloads && SVI.SpillVNI == VNI)

  if (!LIS.hasInterval(SVI.SpillReg)) {

  assert(StackInt && "No stack slot assigned yet.");

  StackInt->MergeValueInAsValue(OrigLI, OrigVNI, StackInt->getValNumInfo(0));
  DEBUG(dbgs() << "\tmerged orig valno " << OrigVNI->id << ": "
               << *StackInt << '\n');

  if (SVI.AllDefsAreReloads) {
    DEBUG(dbgs() << "\tno spill needed: " << SVI);
    ++NumOmitReloadSpill;

  eliminateRedundantSpills(SibLI, SVI.SpillVNI);

  if (SVI.SpillVNI->isPHIDef())

  MachineInstr *DefMI = LIS.getInstructionFromIndex(SVI.SpillVNI->def);
  assert(DefMI && "Defining instruction disappeared");

                          MRI.getRegClass(SVI.SpillReg), &TRI);

  LIS.InsertMachineInstrInMaps(MII);
  DEBUG(dbgs() << "\thoisted: " << SVI.SpillVNI->def << '\t' << *MII);
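// eliminateRedundantSpills - The value VNI in SLI is known to match the
// contents of the stack slot; walk sibling copies and delete spills of that
// value that have become redundant.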
  assert(VNI && "Missing value");

  WorkList.push_back(std::make_pair(&SLI, VNI));
  assert(StackInt && "No stack slot assigned yet.");

    unsigned Reg = LI->reg;
    DEBUG(dbgs() << "Checking redundant spills for "
                 << VNI->id << '@' << VNI->def << " in " << *LI << '\n');

    if (isRegToSpill(Reg))

    StackInt->MergeValueInAsValue(*LI, VNI, StackInt->getValNumInfo(0));
    DEBUG(dbgs() << "Merged to stack int: " << *StackInt << '\n');

         UI = MRI.use_instr_nodbg_begin(Reg), E = MRI.use_instr_nodbg_end();

      SlotIndex Idx = LIS.getInstructionIndex(MI);

      if (isSibling(DstReg)) {

        assert(DstVNI && "Missing defined value");
        assert(DstVNI->def == Idx.getRegSlot() && "Wrong copy def slot");
        WorkList.push_back(std::make_pair(&DstLI, DstVNI));

        DEBUG(dbgs() << "Redundant spill " << Idx << '\t' << *MI);

        DeadDefs.push_back(MI);
  } while (!WorkList.empty());
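// markValueUsed - Remember that VNI (and any values feeding it through PHIs
// or snippet copies) could not be rematerialized away, so its defining
// instructions must be kept.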
  WorkList.push_back(std::make_pair(LI, VNI));

    if (!UsedValues.insert(VNI).second)

           PE = MBB->pred_end(); PI != PE; ++PI) {

        WorkList.push_back(std::make_pair(LI, PVNI));

    if (!SnippetCopies.count(MI))

    assert(isRegToSpill(SnipLI.reg) && "Unexpected register in copy");

    assert(SnipVNI && "Snippet undefined before copy");
    WorkList.push_back(std::make_pair(&SnipLI, SnipVNI));
  } while (!WorkList.empty());
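// reMaterializeFor - Try to satisfy the use of VirtReg in MI by
// rematerializing its value just before MI instead of reloading it from the
// stack slot.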
bool InlineSpiller::reMaterializeFor(LiveInterval &VirtReg,

  MIBundleOperands::VirtRegInfo RI =

    DEBUG(dbgs() << "\tadding <undef> flags: ");
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {

  if (SnippetCopies.count(MI))

  SibValueMap::const_iterator SibI = SibValues.find(ParentVNI);
  if (SibI != SibValues.end())
    RM.OrigMI = SibI->second.DefMI;
  if (!Edit->canRematerializeAt(RM, UseIdx, false)) {
    markValueUsed(&VirtReg, ParentVNI);
    DEBUG(dbgs() << "\tcannot remat for " << UseIdx << '\t' << *MI);

    markValueUsed(&VirtReg, ParentVNI);
    DEBUG(dbgs() << "\tcannot remat tied reg: " << UseIdx << '\t' << *MI);

  if (RM.OrigMI->canFoldAsLoad() &&
      foldMemoryOperand(Ops, RM.OrigMI)) {
    Edit->markRematerialized(RM.ParentVNI);

  unsigned NewVReg = Edit->createFrom(Original);

  SlotIndex DefIdx = Edit->rematerializeAt(*MI->getParent(), MI, NewVReg, RM,

  DEBUG(dbgs() << "\tremat: " << DefIdx << '\t'
               << *LIS.getInstructionFromIndex(DefIdx));

  for (unsigned i = 0, e = Ops.size(); i != e; ++i) {

  DEBUG(dbgs() << "\t " << UseIdx << '\t' << *MI << '\n');
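// reMaterializeAll - Run reMaterializeFor over every use of every register to
// be spilled, then delete defs that became dead and drop registers whose live
// range disappeared entirely.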
void InlineSpiller::reMaterializeAll() {

  if (!Edit->anyRematerializable(AA))

  bool anyRemat = false;
  for (unsigned i = 0, e = RegsToSpill.size(); i != e; ++i) {
    unsigned Reg = RegsToSpill[i];

         RegI = MRI.reg_bundle_begin(Reg), E = MRI.reg_bundle_end();

      anyRemat |= reMaterializeFor(LI, MI);

  for (unsigned i = 0, e = RegsToSpill.size(); i != e; ++i) {
    unsigned Reg = RegsToSpill[i];

        DEBUG(dbgs() << "All defs dead: " << *MI);
        DeadDefs.push_back(MI);

  if (DeadDefs.empty())

  DEBUG(dbgs() << "Remat created " << DeadDefs.size() << " dead defs.\n");
  Edit->eliminateDeadDefs(DeadDefs, RegsToSpill);

  unsigned ResultPos = 0;
  for (unsigned i = 0, e = RegsToSpill.size(); i != e; ++i) {
    unsigned Reg = RegsToSpill[i];
    if (!LIS.hasInterval(Reg))

      Edit->eraseVirtReg(Reg);

    RegsToSpill[ResultPos++] = Reg;

  RegsToSpill.erase(RegsToSpill.begin() + ResultPos, RegsToSpill.end());
  DEBUG(dbgs() << RegsToSpill.size() << " registers to spill after remat.\n");
bool InlineSpiller::coalesceStackAccess(MachineInstr *MI, unsigned Reg) {

  bool IsLoad = InstrReg;

  if (InstrReg != Reg || FI != StackSlot)

  DEBUG(dbgs() << "Coalescing stack access: " << *MI);
  LIS.RemoveMachineInstrFromMaps(MI);

    ++NumReloadsRemoved;
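// Debug-only helper (dumpMachineInstrRangeWithSlotIndex) that prints a range
// of instructions together with their slot indexes.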
#if !defined(NDEBUG)

                                               const char *const header,

  char NextLine = '\n';
  char SlotIndent = '\t';

  if (std::next(B) == E) {

    dbgs() << '\t' << header << ": " << NextLine;

    dbgs() << SlotIndent << Idx << '\t' << *I;
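// foldMemoryOperand - Try to fold the stack slot (or the load LoadMI) into
// the instruction using the operands listed in Ops, replacing the original
// instruction with the folded one in the slot index maps.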
bool InlineSpiller::
foldMemoryOperand(ArrayRef<std::pair<MachineInstr*, unsigned> > Ops,

  bool WasCopy = MI->isCopy();
  unsigned ImpReg = 0;

  for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
    unsigned Idx = Ops[i].second;
    assert(MI == Ops[i].first && "Instruction conflict during operand folding");

    if (LoadMI && MO.isDef())

      LoadMI ? TII.foldMemoryOperand(MI, FoldOps, LoadMI)
             : TII.foldMemoryOperand(MI, FoldOps, StackSlot);

      unsigned Reg = MO->getReg();
      if (!Reg || TargetRegisterInfo::isVirtualRegister(Reg) ||
          MRI.isReserved(Reg)) {

      MIBundleOperands::PhysRegInfo RI =

      assert(MO->isDead() && "Cannot fold physreg def");
      SlotIndex Idx = LIS.getInstructionIndex(MI).getRegSlot();
      LIS.removePhysRegDefAt(Reg, Idx);

  LIS.ReplaceMachineInstrInMaps(MI, FoldMI);

  assert(!MIS.empty() && "Unexpected empty span of instructions!");

    if (&*MII != FoldMI)
      LIS.InsertMachineInstrInMaps(&*MII);

      if (MO.getReg() == ImpReg)

  else if (Ops.front().second == 0)
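// insertReload - Emit a load of NewVReg from the stack slot immediately
// before MI.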
void InlineSpiller::insertReload(unsigned NewVReg,

                           MRI.getRegClass(NewVReg), &TRI);

  LIS.InsertMachineInstrRangeInMaps(MIS.begin(), MI);
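// insertSpill - Emit a store of NewVReg to the stack slot immediately after
// MI.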
void InlineSpiller::insertSpill(unsigned NewVReg, bool isKill,

                          MRI.getRegClass(NewVReg), &TRI);

  LIS.InsertMachineInstrRangeInMaps(std::next(MI), MIS.end());
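// spillAroundUses - Rewrite every instruction that reads or writes Reg: fold
// the stack access into the instruction when possible, otherwise reload
// before the use and spill after the def through a fresh virtual register.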
void InlineSpiller::spillAroundUses(unsigned Reg) {

       RegI = MRI.reg_bundle_begin(Reg), E = MRI.reg_bundle_end();

      DEBUG(dbgs() << "Modifying debug info due to spill:" << "\t" << *MI);

      assert(cast<DILocalVariable>(Var)->isValidLocationForIntrinsic(DL) &&
             "Expected inlined-at fields to agree");

          .addFrameIndex(StackSlot)

    if (SnippetCopies.count(MI))

    if (coalesceStackAccess(MI, Reg))

    MIBundleOperands::VirtRegInfo RI =

        if (SlotIndex::isSameInstr(Idx, VNI->def))

    if (SibReg && isSibling(SibReg)) {

        if (isRegToSpill(SibReg)) {
          DEBUG(dbgs() << "Found new snippet copy: " << *MI);
          SnippetCopies.insert(MI);

        if (hoistSpill(OldLI, MI)) {

          DeadDefs.push_back(MI);

        eliminateRedundantSpills(SibLI, SibLI.getVNInfoAt(Idx));

    if (foldMemoryOperand(Ops))

    unsigned NewVReg = Edit->createFrom(Reg);

      insertReload(NewVReg, Idx, MI);

    bool hasLiveDef = false;
    for (unsigned i = 0, e = Ops.size(); i != e; ++i) {

      if (!Ops[i].first->isRegTiedToDefOperand(Ops[i].second))

    DEBUG(dbgs() << "\trewrite: " << Idx << '\t' << *MI << '\n');

      insertSpill(NewVReg, true, MI);
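// spillAll - Assign a stack slot, merge all registers to spill into the stack
// interval, rewrite their uses, and finally erase the now-dead snippet copies
// and virtual registers.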
void InlineSpiller::spillAll() {

  if (StackSlot == VirtRegMap::NO_STACK_SLOT) {
    StackSlot = VRM.assignVirt2StackSlot(Original);
    StackInt = &LSS.getOrCreateInterval(StackSlot, MRI.getRegClass(Original));
    StackInt->getNextValue(SlotIndex(), LSS.getVNInfoAllocator());

    StackInt = &LSS.getInterval(StackSlot);

  if (Original != Edit->getReg())
    VRM.assignVirt2StackSlot(Edit->getReg(), StackSlot);

  assert(StackInt->getNumValNums() == 1 && "Bad stack interval values");
  for (unsigned i = 0, e = RegsToSpill.size(); i != e; ++i)
    StackInt->MergeSegmentsInAsValue(LIS.getInterval(RegsToSpill[i]),
                                     StackInt->getValNumInfo(0));
  DEBUG(dbgs() << "Merged spilled regs: " << *StackInt << '\n');

  for (unsigned i = 0, e = RegsToSpill.size(); i != e; ++i)
    spillAroundUses(RegsToSpill[i]);

  if (!DeadDefs.empty()) {
    DEBUG(dbgs() << "Eliminating " << DeadDefs.size() << " dead defs\n");
    Edit->eliminateDeadDefs(DeadDefs, RegsToSpill);

  for (unsigned i = 0, e = RegsToSpill.size(); i != e; ++i) {
         RI = MRI.reg_instr_begin(RegsToSpill[i]), E = MRI.reg_instr_end();

      assert(SnippetCopies.count(MI) && "Remaining use wasn't a snippet copy");

      LIS.RemoveMachineInstrFromMaps(MI);

  for (unsigned i = 0, e = RegsToSpill.size(); i != e; ++i)
    Edit->eraseVirtReg(RegsToSpill[i]);
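// spill - Entry point called by the register allocator: collect snippets,
// analyze sibling values, rematerialize what it can, and spill the rest.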
  assert(!TargetRegisterInfo::isStackSlot(edit.getReg())
         && "Trying to spill a stack slot.");

  Original = VRM.getOriginal(edit.getReg());
  StackSlot = VRM.getStackSlot(Original);

               << TRI.getRegClassName(MRI.getRegClass(edit.getReg()))
               << "\nFrom original " << PrintReg(Original) << '\n');

         "Attempting to spill already spilled value.");
  assert(DeadDefs.empty() && "Previous spill didn't remove dead defs");

  collectRegsToSpill();
  analyzeSiblingValues();

  if (!RegsToSpill.empty())
    spillAll();

  Edit->calculateRegClassAndHint(MF, Loops, MBFI);