#define DEBUG_TYPE "thumb2-reduce-size"
#define THUMB2_SIZE_REDUCE_NAME "Thumb2 instruction size reduce pass"

STATISTIC(NumNarrows,  "Number of 32-bit instrs reduced to 16-bit ones");
STATISTIC(Num2Addrs,   "Number of 32-bit instrs reduced to 2addr 16-bit ones");
STATISTIC(NumLdSts,    "Number of 32-bit load / store reduced to 16-bit ones");
    unsigned LowRegs1 : 1;
    unsigned LowRegs2 : 1;
    unsigned PartFlag : 1;
    unsigned AvoidMovs : 1;
  static const ReduceEntry ReduceTable[] = {
  { ARM::t2ADCrr, 0, ARM::tADC, 0, 0, 0, 1, 0,0, 0,0,0 },
  { ARM::t2ADDri, ARM::tADDi3, ARM::tADDi8, 3, 8, 1, 1, 0,0, 0,1,0 },
  { ARM::t2ADDrr, ARM::tADDrr, ARM::tADDhirr, 0, 0, 1, 0, 0,1, 0,0,0 },
  { ARM::t2ADDSri, ARM::tADDi3, ARM::tADDi8, 3, 8, 1, 1, 2,2, 0,1,0 },
  { ARM::t2ADDSrr, ARM::tADDrr, 0, 0, 0, 1, 0, 2,0, 0,1,0 },
  { ARM::t2ANDrr, 0, ARM::tAND, 0, 0, 0, 1, 0,0, 1,0,0 },
  { ARM::t2ASRri, ARM::tASRri, 0, 5, 0, 1, 0, 0,0, 1,0,1 },
  { ARM::t2ASRrr, 0, ARM::tASRrr, 0, 0, 0, 1, 0,0, 1,0,1 },
  { ARM::t2BICrr, 0, ARM::tBIC, 0, 0, 0, 1, 0,0, 1,0,0 },
  { ARM::t2CMNzrr, ARM::tCMNz, 0, 0, 0, 1, 0, 2,0, 0,0,0 },
  { ARM::t2CMPri, ARM::tCMPi8, 0, 8, 0, 1, 0, 2,0, 0,0,0 },
  { ARM::t2CMPrr, ARM::tCMPhir, 0, 0, 0, 0, 0, 2,0, 0,1,0 },
  { ARM::t2EORrr, 0, ARM::tEOR, 0, 0, 0, 1, 0,0, 1,0,0 },
  { ARM::t2LSLri, ARM::tLSLri, 0, 5, 0, 1, 0, 0,0, 1,0,1 },
  { ARM::t2LSLrr, 0, ARM::tLSLrr, 0, 0, 0, 1, 0,0, 1,0,1 },
  { ARM::t2LSRri, ARM::tLSRri, 0, 5, 0, 1, 0, 0,0, 1,0,1 },
  { ARM::t2LSRrr, 0, ARM::tLSRrr, 0, 0, 0, 1, 0,0, 1,0,1 },
  { ARM::t2MOVi, ARM::tMOVi8, 0, 8, 0, 1, 0, 0,0, 1,0,0 },
  { ARM::t2MOVi16, ARM::tMOVi8, 0, 8, 0, 1, 0, 0,0, 1,1,0 },
  { ARM::t2MOVr, ARM::tMOVr, 0, 0, 0, 0, 0, 1,0, 0,0,0 },
  { ARM::t2MUL, 0, ARM::tMUL, 0, 0, 0, 1, 0,0, 1,0,0 },
  { ARM::t2MVNr, ARM::tMVN, 0, 0, 0, 1, 0, 0,0, 0,0,0 },
  { ARM::t2ORRrr, 0, ARM::tORR, 0, 0, 0, 1, 0,0, 1,0,0 },
  { ARM::t2REV, ARM::tREV, 0, 0, 0, 1, 0, 1,0, 0,0,0 },
  { ARM::t2REV16, ARM::tREV16, 0, 0, 0, 1, 0, 1,0, 0,0,0 },
  { ARM::t2REVSH, ARM::tREVSH, 0, 0, 0, 1, 0, 1,0, 0,0,0 },
  { ARM::t2RORrr, 0, ARM::tROR, 0, 0, 0, 1, 0,0, 1,0,0 },
  { ARM::t2RSBri, ARM::tRSB, 0, 0, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2RSBSri, ARM::tRSB, 0, 0, 0, 1, 0, 2,0, 0,1,0 },
  { ARM::t2SBCrr, 0, ARM::tSBC, 0, 0, 0, 1, 0,0, 0,0,0 },
  { ARM::t2SUBri, ARM::tSUBi3, ARM::tSUBi8, 3, 8, 1, 1, 0,0, 0,0,0 },
  { ARM::t2SUBrr, ARM::tSUBrr, 0, 0, 0, 1, 0, 0,0, 0,0,0 },
  { ARM::t2SUBSri, ARM::tSUBi3, ARM::tSUBi8, 3, 8, 1, 1, 2,2, 0,0,0 },
  { ARM::t2SUBSrr, ARM::tSUBrr, 0, 0, 0, 1, 0, 2,0, 0,0,0 },
  { ARM::t2SXTB, ARM::tSXTB, 0, 0, 0, 1, 0, 1,0, 0,1,0 },
  { ARM::t2SXTH, ARM::tSXTH, 0, 0, 0, 1, 0, 1,0, 0,1,0 },
  { ARM::t2TEQrr, ARM::tEOR, 0, 0, 0, 1, 0, 2,0, 0,1,0 },
  { ARM::t2TSTrr, ARM::tTST, 0, 0, 0, 1, 0, 2,0, 0,0,0 },
  { ARM::t2UXTB, ARM::tUXTB, 0, 0, 0, 1, 0, 1,0, 0,1,0 },
  { ARM::t2UXTH, ARM::tUXTH, 0, 0, 0, 1, 0, 1,0, 0,1,0 },

  { ARM::t2LDRi12, ARM::tLDRi, ARM::tLDRspi, 5, 8, 1, 0, 0,0, 0,1,0 },
  { ARM::t2LDRs, ARM::tLDRr, 0, 0, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2LDRBi12, ARM::tLDRBi, 0, 5, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2LDRBs, ARM::tLDRBr, 0, 0, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2LDRHi12, ARM::tLDRHi, 0, 5, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2LDRHs, ARM::tLDRHr, 0, 0, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2LDRSBs, ARM::tLDRSB, 0, 0, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2LDRSHs, ARM::tLDRSH, 0, 0, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2LDR_POST, ARM::tLDMIA_UPD, 0, 0, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2STRi12, ARM::tSTRi, ARM::tSTRspi, 5, 8, 1, 0, 0,0, 0,1,0 },
  { ARM::t2STRs, ARM::tSTRr, 0, 0, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2STRBi12, ARM::tSTRBi, 0, 5, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2STRBs, ARM::tSTRBr, 0, 0, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2STRHi12, ARM::tSTRHi, 0, 5, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2STRHs, ARM::tSTRHr, 0, 0, 0, 1, 0, 0,0, 0,1,0 },
  { ARM::t2STR_POST, ARM::tSTMIA_UPD, 0, 0, 0, 1, 0, 0,0, 0,1,0 },

  { ARM::t2LDMIA, ARM::tLDMIA, 0, 0, 0, 1, 1, 1,1, 0,1,0 },
  { ARM::t2LDMIA_RET, 0, ARM::tPOP_RET, 0, 0, 1, 1, 1,1, 0,1,0 },
  { ARM::t2LDMIA_UPD, ARM::tLDMIA_UPD, ARM::tPOP, 0, 0, 1, 1, 1,1, 0,1,0 },

  { ARM::t2STMIA, ARM::tSTMIA_UPD, 0, 0, 0, 1, 1, 1,1, 0,1,0 },
  { ARM::t2STMIA_UPD, ARM::tSTMIA_UPD, 0, 0, 0, 1, 1, 1,1, 0,1,0 },
  { ARM::t2STMDB_UPD, 0, ARM::tPUSH, 0, 0, 1, 1, 1,1, 0,1,0 }
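  // Reading aid (not part of the upstream table): the columns follow the
  // ReduceEntry fields, assumed here to be WideOpc, NarrowOpc1, NarrowOpc2,
  // Imm1Limit, Imm2Limit, LowRegs1, LowRegs2, PredCC1, PredCC2, PartFlag,
  // Special, AvoidMovs. For example, the t2ADDri row narrows a 32-bit ADD
  // immediate to tADDi3 (3-bit immediate, 0-7) or, in two-address form, to
  // tADDi8 (8-bit immediate, 0-255); it requires low registers, and its
  // Special bit routes it through ReduceSpecial() so the SP-relative case
  // can be handled separately.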
    Thumb2SizeReduce(std::function<bool(const Function &)> Ftor = nullptr);

          MachineFunctionProperties::Property::NoVRegs);
        bool LiveCPSR, bool &HasCC, bool &CCDead);
        const ReduceEntry &Entry);
        const ReduceEntry &Entry, bool LiveCPSR, bool IsSelfLoop);
        const ReduceEntry &Entry, bool LiveCPSR,
        const ReduceEntry &Entry, bool LiveCPSR,
        bool IsSelfLoop, bool SkipPrologueEpilogue);
    bool HighLatencyCPSR;

    bool HighLatencyCPSR = false;
    bool Visited = false;

  char Thumb2SizeReduce::ID = 0;
  OptimizeSize = MinimizeSize = false;
  for (unsigned i = 0, e = std::size(ReduceTable); i != e; ++i) {
    unsigned FromOpc = ReduceTable[i].WideOpc;
    if (!ReduceOpcodeMap.insert(std::make_pair(FromOpc, i)).second)
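  // Added note: ReduceOpcodeMap is keyed by the wide Thumb2 opcode and stores
  // the index of the matching ReduceTable row, so ReduceMI() below can narrow
  // an instruction with a single map lookup, e.g. conceptually:
  //   ReduceOpcodeMap.find(ARM::t2ANDrr)  // -> index of the tAND row
  // The check on insert(...).second guards against a wide opcode appearing
  // twice in the table.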
  switch (Def->getOpcode()) {
Thumb2SizeReduce::canAddPseudoFlagDep(MachineInstr *Use, bool FirstInSelfLoop) {
  if (MinimizeSize || !STI->avoidCPSRPartialUpdate())

    return HighLatencyCPSR || FirstInSelfLoop;

    if (!MO.isReg() || MO.isUndef() || MO.isUse())

    if (Reg == 0 || Reg == ARM::CPSR)

    if (!MO.isReg() || MO.isUndef() || MO.isDef())

  if (Use->getOpcode() == ARM::t2MOVi ||
      Use->getOpcode() == ARM::t2MOVi16)
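  // Added note: this helper only matters on subtargets reporting
  // avoidCPSRPartialUpdate(). Narrowing to a 16-bit flag-setting instruction
  // can introduce a false dependence on the previous CPSR value (a partial
  // flag update), so reduction is skipped when that dependence would be
  // costly. The MOV-immediate forms checked above are still allowed to
  // shrink, since tMOVi8 rarely starts a long dependency chain.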
Thumb2SizeReduce::VerifyPredAndCC(MachineInstr *MI, const ReduceEntry &Entry,
                                  bool is2Addr, ARMCC::CondCodes Pred,
                                  bool LiveCPSR, bool &HasCC, bool &CCDead) {
  if ((is2Addr  && Entry.PredCC2 == 0) ||
      (!is2Addr && Entry.PredCC1 == 0)) {

  } else if ((is2Addr  && Entry.PredCC2 == 2) ||
             (!is2Addr && Entry.PredCC1 == 2)) {
  unsigned Opc = MI->getOpcode();
  bool isPCOk = (Opc == ARM::t2LDMIA_RET || Opc == ARM::t2LDMIA_UPD);
  bool isLROk = (Opc == ARM::t2STMDB_UPD);
  bool isSPOk = isPCOk || isLROk;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {

    if (Reg == 0 || Reg == ARM::CPSR)

    if (isPCOk && Reg == ARM::PC)

    if (isLROk && Reg == ARM::LR)

    if (Reg == ARM::SP) {

      if (i == 1 && (Opc == ARM::t2LDRi12 || Opc == ARM::t2STRi12))
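  // Added note: most 16-bit encodings can only name the low registers r0-r7,
  // so this scan rejects a reduction whenever a higher register shows up,
  // with the exceptions visible above: PC for the LDM/return forms, LR for
  // the push form (t2STMDB_UPD), and SP only where an SP-relative narrow
  // encoding exists (e.g. as operand 1 of t2LDRi12/t2STRi12).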
                                  const ReduceEntry &Entry) {

  bool HasImmOffset = false;
  bool HasShift = false;
  bool HasOffReg = true;
  bool isLdStMul = false;
  unsigned Opc = Entry.NarrowOpc1;
  uint8_t ImmLimit = Entry.Imm1Limit;

  switch (Entry.WideOpc) {

    if (MI->getOperand(1).getReg() == ARM::SP) {
      Opc = Entry.NarrowOpc2;
      ImmLimit = Entry.Imm2Limit;

  case ARM::t2LDR_POST:
  case ARM::t2STR_POST: {

    if (!MI->hasOneMemOperand() ||
        (*MI->memoperands_begin())->getAlign() < Align(4))

    bool IsStore = Entry.WideOpc == ARM::t2STR_POST;
    Register Rt = MI->getOperand(IsStore ? 1 : 0).getReg();
    Register Rn = MI->getOperand(IsStore ? 0 : 1).getReg();
    unsigned Offset = MI->getOperand(3).getImm();
    unsigned PredImm = MI->getOperand(4).getImm();
    Register PredReg = MI->getOperand(5).getReg();

    MI->eraseFromBundle();
    Register BaseReg = MI->getOperand(0).getReg();

      if (MO.getReg() == BaseReg) {

    if (!MI->getOperand(0).isKill())

    Register BaseReg = MI->getOperand(0).getReg();

      if (MO.getReg() == BaseReg)

  case ARM::t2LDMIA_RET: {
    Register BaseReg = MI->getOperand(1).getReg();
    if (BaseReg != ARM::SP)

    Opc = Entry.NarrowOpc2;

  case ARM::t2LDMIA_UPD:
  case ARM::t2STMIA_UPD:
  case ARM::t2STMDB_UPD: {

    Register BaseReg = MI->getOperand(1).getReg();
    if (BaseReg == ARM::SP &&
        (Entry.WideOpc == ARM::t2LDMIA_UPD ||
         Entry.WideOpc == ARM::t2STMDB_UPD)) {
      Opc = Entry.NarrowOpc2;

               (Entry.WideOpc != ARM::t2LDMIA_UPD &&
                Entry.WideOpc != ARM::t2STMIA_UPD)) {

  unsigned OffsetReg = 0;
  bool OffsetKill = false;
  bool OffsetInternal = false;

    OffsetReg = MI->getOperand(2).getReg();
    OffsetKill = MI->getOperand(2).isKill();
    OffsetInternal = MI->getOperand(2).isInternalRead();

    if (MI->getOperand(3).getImm())

  unsigned OffsetImm = 0;

    OffsetImm = MI->getOperand(2).getImm();
    unsigned MaxOffset = ((1 << ImmLimit) - 1) * Scale;

    if ((OffsetImm & (Scale - 1)) || OffsetImm > MaxOffset)
  if (Entry.WideOpc == ARM::t2STMIA)

    MIB.add(MI->getOperand(0));
    MIB.add(MI->getOperand(1));

    MIB.addImm(OffsetImm / Scale);

  assert((!HasShift || OffsetReg) && "Invalid so_reg load / store address!");

                    << " to 16-bit: " << *MIB);
                                const ReduceEntry &Entry,
                                bool LiveCPSR, bool IsSelfLoop) {
  unsigned Opc = MI->getOpcode();
  if (Opc == ARM::t2ADDri) {

    if (MI->getOperand(1).getReg() != ARM::SP) {
      if (ReduceTo2Addr(MBB, MI, Entry, LiveCPSR, IsSelfLoop))

      return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);

    unsigned Imm = MI->getOperand(2).getImm();

    if (Imm & 3 || Imm > 1020)

                        TII->get(ARM::tADDrSPi))
            .add(MI->getOperand(0))
            .add(MI->getOperand(1))

                      << " to 16-bit: " << *MIB);
  if (MI->mayLoadOrStore())
    return ReduceLoadStore(MBB, MI, Entry);

  case ARM::t2ADDSrr: {

      if (ReduceTo2Addr(MBB, MI, Entry, LiveCPSR, IsSelfLoop))

        return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);

    if (MI->getOperand(2).getImm() == 0)
      return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);

    if (MI->getOperand(1).isImm())
      return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);

    static const ReduceEntry NarrowEntry =
      { ARM::t2CMPrr, ARM::tCMPr, 0, 0, 0, 1, 1, 2,0, 0,1,0 };
    if (ReduceToNarrow(MBB, MI, NarrowEntry, LiveCPSR, IsSelfLoop))

    return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);

    if (MI->getOperand(0).isKill())
      return ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop);
                                  const ReduceEntry &Entry,
                                  bool LiveCPSR, bool IsSelfLoop) {

  if (!OptimizeSize && Entry.AvoidMovs && STI->avoidMOVsShifterOperand())

  if (MI->getOpcode() == ARM::t2MUL) {

  } else if (Reg0 != Reg1) {

    unsigned CommOpIdx1 = 1;

    if (!TII->findCommutedOpIndices(*MI, CommOpIdx1, CommOpIdx2) ||
        MI->getOperand(CommOpIdx2).getReg() != Reg0)

    TII->commuteInstruction(*MI, false, CommOpIdx1, CommOpIdx2);

  if (Entry.Imm2Limit) {
    unsigned Imm = MI->getOperand(2).getImm();
    unsigned Limit = (1 << Entry.Imm2Limit) - 1;

  bool SkipPred = false;

  HasCC = (MI->getOperand(NumOps-1).getReg() == ARM::CPSR);
  if (HasCC && MI->getOperand(NumOps-1).isDead())

  if (!VerifyPredAndCC(MI, Entry, true, Pred, LiveCPSR, HasCC, CCDead))

      canAddPseudoFlagDep(MI, IsSelfLoop))

  MIB.add(MI->getOperand(0));

  for (unsigned i = 1, e = MI->getNumOperands(); i != e; ++i) {
    if (i < NumOps && MCID.operands()[i].isOptionalDef())

    if (SkipPred && MCID.operands()[i].isPredicate())

    MIB.add(MI->getOperand(i));

                    << " to 16-bit: " << *MIB);
                                   const ReduceEntry &Entry,
                                   bool LiveCPSR, bool IsSelfLoop) {

  if (!OptimizeSize && Entry.AvoidMovs && STI->avoidMOVsShifterOperand())

  unsigned Limit = ~0U;

    Limit = (1 << Entry.Imm1Limit) - 1;

    if (MCID.operands()[i].isPredicate())

      if (!Reg || Reg == ARM::CPSR)

    } else if (MO.isImm() && !MCID.operands()[i].isPredicate()) {
      if (((unsigned)MO.getImm()) > Limit)

  bool SkipPred = false;

  HasCC = (MI->getOperand(NumOps-1).getReg() == ARM::CPSR);
  if (HasCC && MI->getOperand(NumOps-1).isDead())

  if (!VerifyPredAndCC(MI, Entry, false, Pred, LiveCPSR, HasCC, CCDead))

      canAddPseudoFlagDep(MI, IsSelfLoop))

    MIB.add(MI->getOperand(0));

    MIB.add(MI->getOperand(0));

    MIB.add(MI->getOperand(0));

  for (unsigned i = 1, e = MI->getNumOperands(); i != e; ++i) {
    if (i < NumOps && MCID.operands()[i].isOptionalDef())

    if ((MCID.getOpcode() == ARM::t2RSBSri ||

         MCID.getOpcode() == ARM::t2UXTH) && i == 2)

    bool isPred = (i < NumOps && MCID.operands()[i].isPredicate());
    if (SkipPred && isPred)

                    << " to 16-bit: " << *MIB);
    if (MO.getReg() != ARM::CPSR)

  return HasDef || LiveCPSR;

    if (MO.getReg() != ARM::CPSR)

    assert(LiveCPSR && "CPSR liveness tracking is wrong!");
                             bool LiveCPSR, bool IsSelfLoop,
                             bool SkipPrologueEpilogue) {
  unsigned Opcode = MI->getOpcode();

  if (OPI == ReduceOpcodeMap.end())

  const ReduceEntry &Entry = ReduceTable[OPI->second];

    return ReduceSpecial(MBB, MI, Entry, LiveCPSR, IsSelfLoop);

  if (Entry.NarrowOpc2 &&
      ReduceTo2Addr(MBB, MI, Entry, LiveCPSR, IsSelfLoop))

  if (Entry.NarrowOpc1 &&
      ReduceToNarrow(MBB, MI, Entry, LiveCPSR, IsSelfLoop))
                                  bool SkipPrologueEpilogue) {

  HighLatencyCPSR = false;

    const MBBInfo &PInfo = BlockInfo[Pred->getNumber()];
    if (!PInfo.Visited) {

    if (PInfo.HighLatencyCPSR) {
      HighLatencyCPSR = true;

  for (; MII != E; MII = NextMII) {
    NextMII = std::next(MII);

    if (MI->isBundle()) {

    if (MI->isDebugInstr())

    bool NextInSameBundle = NextMII != E && NextMII->isBundledWithPred();

    if (ReduceMI(MBB, MI, LiveCPSR, IsSelfLoop, SkipPrologueEpilogue)) {

      if (NextInSameBundle && !NextMII->isBundledWithPred())
        NextMII->bundleWithPred();

    if (BundleMI && !NextInSameBundle && MI->isInsideBundle()) {

    bool DefCPSR = false;

      HighLatencyCPSR = false;

    } else if (DefCPSR) {

  Info.HighLatencyCPSR = HighLatencyCPSR;
  Info.Visited = true;
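  // Added note: ReduceMBB seeds HighLatencyCPSR from already-visited
  // predecessors, walks the block instruction by instruction (re-bundling a
  // reduced instruction that was inside an instruction bundle), tracks CPSR
  // definitions and uses as it goes, and finally records HighLatencyCPSR and
  // Visited in this block's BlockInfo entry for its successors.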
  if (PredicateFtor && !PredicateFtor(MF.getFunction()))

FunctionPass *llvm::createThumb2SizeReductionPass(
    std::function<bool(const Function &)> Ftor) {
  return new Thumb2SizeReduce(std::move(Ftor));
}
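// Usage sketch (illustrative, not from this file): a target pass pipeline can
// create the pass with an optional predicate restricting which functions it
// runs on; runOnMachineFunction() bails out early when the predicate rejects
// the current function:
//
//   addPass(createThumb2SizeReductionPass());              // all functions
//   addPass(createThumb2SizeReductionPass(
//       [](const Function &F) { return F.hasOptSize(); })); // filtered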