#include "llvm/Config/llvm-config.h"

#define DEBUG_TYPE "arm-cp-islands"

#define ARM_CP_ISLANDS_OPT_NAME \
  "ARM constant island placement and branch shortening pass"

STATISTIC(NumSplit,      "Number of uncond branches inserted");
STATISTIC(NumCBrFixed,   "Number of cond branches fixed");
STATISTIC(NumUBrFixed,   "Number of uncond branches fixed");
STATISTIC(NumTBs,        "Number of table branches generated");
STATISTIC(NumT2CPShrunk, "Number of Thumb2 constantpool instructions shrunk");
STATISTIC(NumT2BrShrunk, "Number of Thumb2 immediate branches shrunk");
STATISTIC(NumJTMoved,    "Number of jump table destination blocks moved");
STATISTIC(NumJTInserted, "Number of jump table intermediate blocks inserted");
STATISTIC(NumLEInserted, "Number of LE backwards branches inserted");

static cl::opt<bool>
AdjustJumpTableBlocks("arm-adjust-jump-tables", cl::Hidden, cl::init(true),
          cl::desc("Adjust basic block layout to better use TB[BH]"));

static cl::opt<unsigned>
CPMaxIteration("arm-constant-island-max-iteration", cl::Hidden, cl::init(30),
          cl::desc("The maximum number of iterations allowed to converge"));

static cl::opt<bool> SynthesizeThumb1TBB(
    "arm-synthesize-thumb-1-tbb", cl::Hidden, cl::init(true),
    cl::desc("Use compressed jump tables in Thumb-1 by synthesizing an "
             "equivalent to the TBB/TBH instructions"));
namespace {

  /// ARMConstantIslands - Due to limited PC-relative displacements, ARM
  /// requires constant pool entries to be scattered among the instructions
  /// inside a function.  This pass places the constants and shortens
  /// branches accordingly.
  class ARMConstantIslands : public MachineFunctionPass {
    std::unique_ptr<ARMBasicBlockUtils> BBUtils = nullptr;

    /// WaterList - A sorted list of basic blocks where islands could be
    /// placed (i.e., blocks that don't fall through to the following block).
    std::vector<MachineBasicBlock *> WaterList;

    using water_iterator = std::vector<MachineBasicBlock *>::iterator;

    /// CPUser - One user of a constant pool entry.
    struct CPUser {
      MachineInstr *MI;
      MachineInstr *CPEMI;
      MachineBasicBlock *HighWaterMark;
      unsigned MaxDisp;
      bool NegOk;
      bool IsSoImm;
      bool KnownAlignment = false;

      CPUser(MachineInstr *mi, MachineInstr *cpemi, unsigned maxdisp, bool neg,
             bool soimm)
          : MI(mi), CPEMI(cpemi), MaxDisp(maxdisp), NegOk(neg), IsSoImm(soimm) {
        HighWaterMark = CPEMI->getParent();
      }

      /// getMaxDisp - Returns the maximum displacement supported by MI.
      /// Correct for unknown alignment.  Conservatively subtract 2 bytes to
      /// handle weird alignment effects.
      unsigned getMaxDisp() const {
        return (KnownAlignment ? MaxDisp : MaxDisp - 2) - 2;
      }
    };

    /// CPUsers - Keep track of all of the machine instructions that use
    /// various constant pools and their max displacement.
    std::vector<CPUser> CPUsers;

    /// CPEntry - One per constant pool entry, keeping the machine instruction
    /// pointer, the constpool index, and the number of CPUser's which
    /// reference this entry.
    struct CPEntry {
      MachineInstr *CPEMI;
      unsigned CPI;
      unsigned RefCount;

      CPEntry(MachineInstr *cpemi, unsigned cpi, unsigned rc = 1)
          : CPEMI(cpemi), CPI(cpi), RefCount(rc) {}
    };

    /// CPEntries - Keep track of all of the constant pool entry instructions.
    /// Original elements are cloned as we go along; the clones are put in the
    /// vector of the original element, but have distinct CPIs.
    std::vector<std::vector<CPEntry>> CPEntries;

    /// ImmBranch - One per immediate branch that may need to be made longer
    /// if its displacement doesn't fit.
    struct ImmBranch {
      MachineInstr *MI;
      unsigned MaxDisp : 31;
      bool isCond : 1;
      unsigned UncondBr;

      ImmBranch(MachineInstr *mi, unsigned maxdisp, bool cond, unsigned ubr)
          : MI(mi), MaxDisp(maxdisp), isCond(cond), UncondBr(ubr) {}
    };

    std::vector<ImmBranch> ImmBranches;

    bool isPositionIndependentOrROPI;

    MachineFunctionProperties getRequiredProperties() const override {
      return MachineFunctionProperties().set(
          MachineFunctionProperties::Property::NoVRegs);
    }
    void doInitialConstPlacement(std::vector<MachineInstr *> &CPEMIs);
    void doInitialJumpTablePlacement(std::vector<MachineInstr *> &CPEMIs);
    CPEntry *findConstPoolEntry(unsigned CPI, const MachineInstr *CPEMI);
    void scanFunctionJumpTables();
    void initializeFunctionInfo(const std::vector<MachineInstr *> &CPEMIs);
    bool decrementCPEReferenceCount(unsigned CPI, MachineInstr *CPEMI);
    int findInRangeCPEntry(CPUser &U, unsigned UserOffset);
    bool findAvailableWater(CPUser &U, unsigned UserOffset,
                            water_iterator &WaterIter, bool CloserWater);
    void createNewWater(unsigned CPUserIndex, unsigned UserOffset,
                        MachineBasicBlock *&NewMBB);
    bool handleConstantPoolUser(unsigned CPUserIndex, bool CloserWater);
    bool removeUnusedCPEntries();
    bool isCPEntryInRange(MachineInstr *MI, unsigned UserOffset,
                          MachineInstr *CPEMI, unsigned MaxDisp, bool NegOk,
                          bool DoDump = false);
    bool isWaterInRange(unsigned UserOffset, MachineBasicBlock *Water,
                        CPUser &U, unsigned &Growth);
    bool fixupImmediateBr(ImmBranch &Br);
    bool fixupConditionalBr(ImmBranch &Br);
    bool fixupUnconditionalBr(ImmBranch &Br);
    bool optimizeThumb2Instructions();
    bool optimizeThumb2Branches();
    bool reorderThumb2JumpTables();
    bool preserveBaseRegister(MachineInstr *JumpMI, MachineInstr *LEAMI,
                              unsigned &DeadSize, bool &CanDeleteLEA,
                              bool &BaseRegKill);
    bool optimizeThumb2JumpTables();

    unsigned getUserOffset(CPUser &) const;

    bool isOffsetInRange(unsigned UserOffset, unsigned TrialOffset,
                         unsigned Disp, bool NegativeOK, bool IsSoImm = false);
    bool isOffsetInRange(unsigned UserOffset, unsigned TrialOffset,
                         const CPUser &U) {
      return isOffsetInRange(UserOffset, TrialOffset, U.getMaxDisp(), U.NegOk,
                             U.IsSoImm);
    }
  };

} // end anonymous namespace
char ARMConstantIslands::ID = 0;
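/// verify - check BBOffsets, BBSizes, alignment of islands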
void ARMConstantIslands::verify() {
#ifndef NDEBUG
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  assert(is_sorted(*MF,
                   [&BBInfo](const MachineBasicBlock &LHS,
                             const MachineBasicBlock &RHS) {
                     return BBInfo[LHS.getNumber()].postOffset() <
                            BBInfo[RHS.getNumber()].postOffset();
                   }));
  LLVM_DEBUG(dbgs() << "Verifying " << CPUsers.size() << " CP users.\n");
  for (CPUser &U : CPUsers) {
    unsigned UserOffset = getUserOffset(U);
    // Verify offset using the real max displacement without the safety
    // adjustment.
    if (isCPEntryInRange(U.MI, UserOffset, U.CPEMI, U.getMaxDisp()+2, U.NegOk,
                         /* DoDump = */ true)) {
      LLVM_DEBUG(dbgs() << "OK\n");
      continue;
    }
    LLVM_DEBUG(dbgs() << "Out of range.\n");
    llvm_unreachable("Constant pool entry out of range!");
  }
#endif
}
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// print block size and offset information - debugging
LLVM_DUMP_METHOD void ARMConstantIslands::dumpBBs() {
  LLVM_DEBUG({
    BBInfoVector &BBInfo = BBUtils->getBBInfo();
    for (unsigned J = 0, E = BBInfo.size(); J != E; ++J) {
      const BasicBlockInfo &BBI = BBInfo[J];
      dbgs() << format("%08x %bb.%u\t", BBI.Offset, J)
             << " kb=" << unsigned(BBI.KnownBits)
             << " ua=" << unsigned(BBI.Unalign) << " pa=" << Log2(BBI.PostAlign)
             << format(" size=%#x\n", BBInfo[J].Size);
    }
  });
}
#endif
// Align blocks where the previous block does not fall through.  The added
// NOPs are never executed.
static bool AlignBlocks(MachineFunction *MF, const ARMSubtarget *STI) {
  auto *TLI = STI->getTargetLowering();
  const Align Alignment = TLI->getPrefLoopAlignment();

  bool Changed = false;
  bool PrevCanFallthough = true;
  for (auto &MBB : *MF) {
    if (!PrevCanFallthough) {
      Changed = true;
      MBB.setAlignment(Alignment);
    }

    PrevCanFallthough = MBB.canFallThrough();

    for (const auto &MI : reverse(MBB.terminators())) {
      if (MI.getOpcode() == ARM::t2B &&
          MI.getOperand(0).getMBB() == MBB.getNextNode())
        continue;
      if (isLoopStart(MI) || MI.getOpcode() == ARM::t2LoopEnd ||
          MI.getOpcode() == ARM::t2LoopEndDec) {
        PrevCanFallthough = true;
        break;
      }
      // Any other terminator - nothing to do.
      break;
    }
  }

  return Changed;
}
bool ARMConstantIslands::runOnMachineFunction(MachineFunction &mf) {
  MF = &mf;
  MCP = mf.getConstantPool();
  BBUtils = std::make_unique<ARMBasicBlockUtils>(mf);

  LLVM_DEBUG(dbgs() << "***** ARMConstantIslands: "
                    << MCP->getConstants().size() << " CP entries, aligned to "
                    << MCP->getConstantPoolAlign().value() << " bytes *****\n");

  STI = &MF->getSubtarget<ARMSubtarget>();
  TII = STI->getInstrInfo();
  isPositionIndependentOrROPI =
      STI->getTargetLowering()->isPositionIndependent() || STI->isROPI();
  AFI = MF->getInfo<ARMFunctionInfo>();
  DT = &getAnalysis<MachineDominatorTree>();

  isThumb = AFI->isThumbFunction();
  isThumb1 = AFI->isThumb1OnlyFunction();
  isThumb2 = AFI->isThumb2Function();

  GenerateTBB = isThumb2 || (isThumb1 && SynthesizeThumb1TBB);
  // TBB generation code in this constant island pass has not been adapted to
  // deal with speculation barriers.
  if (STI->hardenSlsRetBr())
    GenerateTBB = false;

  // Renumber all of the machine basic blocks in the function, guaranteeing
  // that the numbers agree with the position of the block in the function.
  MF->RenumberBlocks();

  // Try to reorder and otherwise adjust the block layout to make good use
  // of the TB[BH] instructions.
  bool MadeChange = false;
  if (GenerateTBB && AdjustJumpTableBlocks) {
    scanFunctionJumpTables();
    MadeChange |= reorderThumb2JumpTables();
    // Data is out of date, so clear it.  It'll be re-computed later.
    T2JumpTables.clear();
    // Blocks may have shifted around.  Keep the numbering up to date.
    MF->RenumberBlocks();
  }

  // Perform the initial placement of the constant pool entries.  To start
  // with, we put them all at the end of the function.
  std::vector<MachineInstr*> CPEMIs;
  if (!MCP->isEmpty())
    doInitialConstPlacement(CPEMIs);

  if (MF->getJumpTableInfo())
    doInitialJumpTablePlacement(CPEMIs);

  /// The next UID to take is the first unused one.
  AFI->initPICLabelUId(CPEMIs.size());

  // Do the initial scan of the function, building up information about the
  // sizes of each block, the location of all the water, and finding all of
  // the constant pool users.
  initializeFunctionInfo(CPEMIs);
  CPEMIs.clear();

  // Functions with jump tables need an alignment of 4 because they use the
  // ADR instruction, which aligns the PC to 4 bytes before adding an offset.
  if (!T2JumpTables.empty())
    MF->ensureAlignment(Align(4));

  /// Remove dead constant pool entries.
  MadeChange |= removeUnusedCPEntries();
  // Iteratively place constant pool entries and fix up branches until there
  // is no change.
  unsigned NoCPIters = 0, NoBRIters = 0;
  while (true) {
    LLVM_DEBUG(dbgs() << "Beginning CP iteration #" << NoCPIters << '\n');
    bool CPChange = false;
    for (unsigned i = 0, e = CPUsers.size(); i != e; ++i)
      // For most inputs, it converges in no more than 5 iterations.
      // If it doesn't end in CPMaxIteration, the input may have huge BB's.
      // In this case, we will try different heuristics.
      CPChange |= handleConstantPoolUser(i, NoCPIters >= CPMaxIteration / 2);
    if (CPChange && ++NoCPIters > CPMaxIteration)
      report_fatal_error("Constant Island pass failed to converge!");

    // Clear NewWaterList now.  If we split a block for branches, it should
    // appear as "new water" for the next iteration of constant pool
    // placement.
    NewWaterList.clear();

    LLVM_DEBUG(dbgs() << "Beginning BR iteration #" << NoBRIters << '\n');
    bool BRChange = false;
    for (unsigned i = 0, e = ImmBranches.size(); i != e; ++i)
      BRChange |= fixupImmediateBr(ImmBranches[i]);
    if (BRChange && ++NoBRIters > 30)
      report_fatal_error("Branch Fix Up pass failed to converge!");

    if (!CPChange && !BRChange)
      break;
    MadeChange = true;
  }
  // Shrink 32-bit Thumb2 load and store instructions.
  if (isThumb2 && !STI->prefers32BitThumb())
    MadeChange |= optimizeThumb2Instructions();

  // Shrink 32-bit branch instructions.
  if (isThumb && STI->hasV8MBaselineOps())
    MadeChange |= optimizeThumb2Branches();

  // Optimize jump tables using TBB / TBH.
  if (GenerateTBB && !STI->genExecuteOnly())
    MadeChange |= optimizeThumb2JumpTables();

  // After a while, this might be made debug-only, but it is not expensive.
  verify();

  // Save the mapping between original and cloned constpool entries.
  for (unsigned i = 0, e = CPEntries.size(); i != e; ++i) {
    for (unsigned j = 0, je = CPEntries[i].size(); j != je; ++j) {
      const CPEntry &CPE = CPEntries[i][j];
      if (CPE.CPEMI && CPE.CPEMI->getOperand(1).isCPI())
        AFI->recordCPEClone(i, CPE.CPI);
    }
  }
  BBUtils->clear();
  WaterList.clear();
  CPUsers.clear();
  CPEntries.clear();
  JumpTableEntryIndices.clear();
  JumpTableUserIndices.clear();
  BlockJumpTableRefCount.clear();
  ImmBranches.clear();
  PushPopMIs.clear();
  T2JumpTables.clear();

  return MadeChange;
}
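/// doInitialConstPlacement - Perform the initial placement of the regular
/// constant pool entries.  To start with, we put them all at the end of the
/// function.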
void ARMConstantIslands::doInitialConstPlacement(
    std::vector<MachineInstr *> &CPEMIs) {
  // Create the basic block to hold the CPE's.
  MachineBasicBlock *BB = MF->CreateMachineBasicBlock();
  MF->push_back(BB);

  // MachineConstantPool measures alignment in bytes.
  const Align MaxAlign = MCP->getConstantPoolAlign();
  const unsigned MaxLogAlign = Log2(MaxAlign);

  // Mark the basic block as required by the const-pool.
  BB->setAlignment(MaxAlign);

  // The function needs to be as aligned as the basic blocks.  The linker may
  // move functions around based on their alignment.
  Align FuncAlign = MaxAlign;
  if (MaxAlign == 2)
    FuncAlign = Align(4);
  MF->ensureAlignment(FuncAlign);

  // Order the entries in BB by descending alignment.  Track the insertion
  // point for each alignment, bucket-sorting the entries as they are created.
  SmallVector<MachineBasicBlock::iterator, 8> InsPoint(MaxLogAlign + 1,
                                                       BB->end());

  // Add all of the constants from the constant pool to the end block, using
  // an identity mapping of CPI's to CPE's.
  const std::vector<MachineConstantPoolEntry> &CPs = MCP->getConstants();

  const DataLayout &TD = MF->getDataLayout();
  for (unsigned i = 0, e = CPs.size(); i != e; ++i) {
    unsigned Size = CPs[i].getSizeInBytes(TD);
    Align Alignment = CPs[i].getAlign();

    // Insert CONSTPOOL_ENTRY before entries with a smaller alignment.
    unsigned LogAlign = Log2(Alignment);
    MachineBasicBlock::iterator InsAt = InsPoint[LogAlign];
    MachineInstr *CPEMI =
        BuildMI(*BB, InsAt, DebugLoc(), TII->get(ARM::CONSTPOOL_ENTRY))
            .addImm(i).addConstantPoolIndex(i).addImm(Size);
    CPEMIs.push_back(CPEMI);

    // Ensure that future entries with higher alignment get inserted before
    // CPEMI; this is bucket sort with iterators.
    for (unsigned a = LogAlign + 1; a <= MaxLogAlign; ++a)
      if (InsPoint[a] == InsAt)
        InsPoint[a] = CPEMI;

    // Add a new CPEntry, but no corresponding CPUser yet.
    CPEntries.emplace_back(1, CPEntry(CPEMI, i));
    LLVM_DEBUG(dbgs() << "Moved CPI#" << i << " to end of function, size = "
                      << Size << ", align = " << Alignment.value() << '\n');
  }
}
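/// Do initial placement of the jump tables.  Because Thumb2's TBB and TBH
/// instructions have restricted displacements, the tables are placed in
/// dedicated blocks immediately following the branches that use them.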
void ARMConstantIslands::doInitialJumpTablePlacement(
    std::vector<MachineInstr *> &CPEMIs) {
  unsigned i = CPEntries.size();
  auto MJTI = MF->getJumpTableInfo();
  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();

  MachineBasicBlock *LastCorrectlyNumberedBB = nullptr;
  for (MachineBasicBlock &MBB : *MF) {
    auto MI = MBB.getLastNonDebugInstr();
    if (MI == MBB.end())
      continue;

    unsigned JTOpcode;
    switch (MI->getOpcode()) {
    default:
      continue;
    case ARM::BR_JTm_i12:
      // Emitted only in ARM or Thumb1 modes, which do not support branch
      // protection.
      assert(!MF->getInfo<ARMFunctionInfo>()->branchTargetEnforcement() &&
             "Branch protection must not be enabled for Arm or Thumb1 modes");
      JTOpcode = ARM::JUMPTABLE_ADDRS;
      break;
    case ARM::t2BR_JT:
      JTOpcode = ARM::JUMPTABLE_INSTS;
      break;
    case ARM::t2TBB_JT:
      JTOpcode = ARM::JUMPTABLE_TBB;
      break;
    case ARM::t2TBH_JT:
      JTOpcode = ARM::JUMPTABLE_TBH;
      break;
    }

    unsigned NumOps = MI->getDesc().getNumOperands();
    MachineOperand JTOp =
        MI->getOperand(NumOps - (MI->isPredicable() ? 2 : 1));
    unsigned JTI = JTOp.getIndex();
    unsigned Size = JT[JTI].MBBs.size() * sizeof(uint32_t);

    MachineBasicBlock *JumpTableBB = MF->CreateMachineBasicBlock();
    MF->insert(std::next(MachineFunction::iterator(MBB)), JumpTableBB);
    MachineInstr *CPEMI = BuildMI(*JumpTableBB, JumpTableBB->begin(),
                                  DebugLoc(), TII->get(JTOpcode))
                              .addImm(i++)
                              .addJumpTableIndex(JTI)
                              .addImm(Size);
    CPEMIs.push_back(CPEMI);
    CPEntries.emplace_back(1, CPEntry(CPEMI, JTI));
    JumpTableEntryIndices.insert(std::make_pair(JTI, CPEntries.size() - 1));
    if (!LastCorrectlyNumberedBB)
      LastCorrectlyNumberedBB = &MBB;
  }

  // If we did anything then we need to renumber the subsequent blocks.
  if (LastCorrectlyNumberedBB)
    MF->RenumberBlocks(LastCorrectlyNumberedBB);
}
bool ARMConstantIslands::BBHasFallthrough(MachineBasicBlock *MBB) {
  // Try to analyze the end of the block.  A potential fallthrough may
  // already have an unconditional branch for whatever reason.
  MachineBasicBlock *TBB, *FBB;
  SmallVector<MachineOperand, 4> Cond;
  bool TooDifficult = TII->analyzeBranch(*MBB, TBB, FBB, Cond, false);
  return TooDifficult || FBB == nullptr;
}
ARMConstantIslands::CPEntry *
ARMConstantIslands::findConstPoolEntry(unsigned CPI,
                                       const MachineInstr *CPEMI) {
  std::vector<CPEntry> &CPEs = CPEntries[CPI];
  // The number of entries per constpool index should be small, so just do a
  // linear search.
  for (CPEntry &CPE : CPEs)
    if (CPE.CPEMI == CPEMI)
      return &CPE;
  return nullptr;
}
/// getCPEAlign - Returns the required alignment of the constant pool entry
/// represented by CPEMI.
Align ARMConstantIslands::getCPEAlign(const MachineInstr *CPEMI) {
  switch (CPEMI->getOpcode()) {
  case ARM::CONSTPOOL_ENTRY:
    break;
  case ARM::JUMPTABLE_TBB:
    return isThumb1 ? Align(4) : Align(1);
  case ARM::JUMPTABLE_TBH:
    return isThumb1 ? Align(4) : Align(2);
  case ARM::JUMPTABLE_INSTS:
    return Align(2);
  case ARM::JUMPTABLE_ADDRS:
    return Align(4);
  default:
    llvm_unreachable("unknown constpool entry kind");
  }

  unsigned CPI = getCombinedIndex(CPEMI);
  assert(CPI < MCP->getConstants().size() && "Invalid constant pool index.");
  return MCP->getConstants()[CPI].getAlign();
}
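/// scanFunctionJumpTables - Do a scan of the function, building up
/// information about the locations of all the jump tables.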
void ARMConstantIslands::scanFunctionJumpTables() {
  for (MachineBasicBlock &MBB : *MF) {
    for (MachineInstr &I : MBB)
      if (I.isBranch() &&
          (I.getOpcode() == ARM::t2BR_JT || I.getOpcode() == ARM::tBR_JTr))
        T2JumpTables.push_back(&I);
  }

  if (const MachineJumpTableInfo *JTI = MF->getJumpTableInfo())
    for (const MachineJumpTableEntry &JTE : JTI->getJumpTables())
      for (const MachineBasicBlock *MBB : JTE.MBBs) {
        if (isAlwaysIndirectTarget(*MBB))
          // Set the reference count essentially to infinity; it will never
          // reach zero and the BTI instruction will never be removed.
          BlockJumpTableRefCount[MBB] = std::numeric_limits<int>::max();
        else
          ++BlockJumpTableRefCount[MBB];
      }
}
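/// initializeFunctionInfo - Do the initial scan of the function, building up
/// information about the sizes of each block, the location of all the water,
/// and finding all of the constant pool users.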
void ARMConstantIslands::
initializeFunctionInfo(const std::vector<MachineInstr*> &CPEMIs) {
  BBUtils->computeAllBlockSizes();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();

  // The known bits of the entry block offset are determined by the function
  // alignment.
  BBInfo.front().KnownBits = Log2(MF->getAlignment());

  // Compute block offsets and known bits.
  BBUtils->adjustBBOffsetsAfter(&MF->front());

  // Now go back through the instructions and build up our data structures.
  for (MachineBasicBlock &MBB : *MF) {
    // If this block doesn't fall through into the next MBB, then this is
    // 'water' that a constant pool island could be placed.
    if (!BBHasFallthrough(&MBB))
      WaterList.push_back(&MBB);

    for (MachineInstr &I : MBB) {
      if (I.isDebugInstr())
        continue;

      unsigned Opc = I.getOpcode();
      if (I.isBranch()) {
        bool isCond = false;
        unsigned Bits = 0;
        unsigned Scale = 1;
        int UOpc = Opc;
        switch (Opc) {
        default:
          continue; // Ignore other branches for now.
        case ARM::t2BR_JT:
        case ARM::tBR_JTr:
          T2JumpTables.push_back(&I);
          continue; // Does not get an entry in ImmBranches.
        // ... (the remaining branch opcodes set Bits, Scale, isCond, UOpc)
        }

        // Record this immediate branch.
        unsigned MaxOffs = ((1 << (Bits-1))-1) * Scale;
        ImmBranches.push_back(ImmBranch(&I, MaxOffs, isCond, UOpc));
      }

      if (Opc == ARM::tPUSH || Opc == ARM::tPOP_RET)
        PushPopMIs.push_back(&I);

      if (Opc == ARM::CONSTPOOL_ENTRY || Opc == ARM::JUMPTABLE_ADDRS ||
          Opc == ARM::JUMPTABLE_INSTS || Opc == ARM::JUMPTABLE_TBB ||
          Opc == ARM::JUMPTABLE_TBH)
        continue;

      // Scan the instructions for constant pool operands.
      for (unsigned op = 0, e = I.getNumOperands(); op != e; ++op)
        if (I.getOperand(op).isCPI() || I.getOperand(op).isJTI()) {
          // We found one.  The addressing mode tells us the max displacement
          // from the PC that this instruction permits.
          unsigned Bits = 0;
          unsigned Scale = 1;
          bool NegOk = false;
          bool IsSoImm = false;

          switch (Opc) {
          default:
            llvm_unreachable("Unknown addressing mode for CP reference!");

          // Taking the address of a CP entry.
          case ARM::LEApcrel:
          case ARM::LEApcrelJT: {
            // This takes a SoImm, which is an 8-bit rotated immediate.
            Bits = 8;
            NegOk = true;
            IsSoImm = true;
            unsigned CPI = I.getOperand(op).getIndex();
            assert(CPI < CPEMIs.size());
            MachineInstr *CPEMI = CPEMIs[CPI];
            const Align CPEAlign = getCPEAlign(CPEMI);
            const unsigned LogCPEAlign = Log2(CPEAlign);
            if (LogCPEAlign >= 2)
              Scale = 4;
            else
              // For constants with less than 4-byte alignment,
              // @LPG + #imm8*4 is not safe.
              Scale = 1;
            break;
          }
          case ARM::t2LEApcrel:
          case ARM::t2LEApcrelJT:
            Bits = 12;
            NegOk = true;
            break;
          case ARM::tLEApcrel:
          case ARM::tLEApcrelJT:
            Bits = 8;
            Scale = 4;
            break;
          case ARM::t2LDRpci:
          case ARM::t2LDRHpci:
          case ARM::t2LDRSHpci:
          case ARM::t2LDRBpci:
          case ARM::t2LDRSBpci:
            Bits = 12; // +-offset_12
            NegOk = true;
            break;
          // ... (remaining load opcodes elided)
          }

          // Remember that this is a user of a CP entry.
          unsigned CPI = I.getOperand(op).getIndex();
          if (I.getOperand(op).isJTI()) {
            JumpTableUserIndices.insert(std::make_pair(CPI, CPUsers.size()));
            CPI = JumpTableEntryIndices[CPI];
          }
          MachineInstr *CPEMI = CPEMIs[CPI];
          unsigned MaxOffs = ((1 << Bits)-1) * Scale;
          CPUsers.push_back(CPUser(&I, CPEMI, MaxOffs, NegOk, IsSoImm));

          // Increment corresponding CPEntry reference count.
          CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);
          assert(CPE && "Cannot find a corresponding CPEntry!");
          CPE->RefCount++;

          // Instructions can only use one CP entry; don't bother scanning
          // the rest of the operands.
          break;
        }
    }
  }
}
/// CompareMBBNumbers - Little predicate function to sort the WaterList by MBB
/// ID.
static bool CompareMBBNumbers(const MachineBasicBlock *LHS,
                              const MachineBasicBlock *RHS) {
  return LHS->getNumber() < RHS->getNumber();
}
void ARMConstantIslands::updateForInsertedWaterBlock(
    MachineBasicBlock *NewBB) {
  // Renumber the MBB's to keep them consecutive.
  NewBB->getParent()->RenumberBlocks(NewBB);

  // Insert an entry into BBInfo to align it properly with the (newly
  // renumbered) block numbers.
  BBUtils->insert(NewBB->getNumber(), BasicBlockInfo());

  // Next, update WaterList by adding NewBB as having available water.
  water_iterator IP = llvm::lower_bound(WaterList, NewBB, CompareMBBNumbers);
  WaterList.insert(IP, NewBB);
}

MachineBasicBlock *ARMConstantIslands::splitBlockBeforeInstr(MachineInstr *MI) {
  MachineBasicBlock *OrigBB = MI->getParent();

  // Collect liveness information at MI.
  LivePhysRegs LRs(*MF->getSubtarget().getRegisterInfo());
  LRs.addLiveOuts(*OrigBB);
  auto LivenessEnd = ++MachineBasicBlock::iterator(MI).getReverse();
  for (MachineInstr &LiveMI : make_range(OrigBB->rbegin(), LivenessEnd))
    LRs.stepBackward(LiveMI);

  // Create a new MBB for the code after OrigBB, and splice MI and everything
  // following it across.
  MachineBasicBlock *NewBB =
      MF->CreateMachineBasicBlock(OrigBB->getBasicBlock());
  MachineFunction::iterator MBBI = ++OrigBB->getIterator();
  MF->insert(MBBI, NewBB);
  NewBB->splice(NewBB->end(), OrigBB, MI, OrigBB->end());

  // Add an unconditional branch from OrigBB to NewBB.
  unsigned Opc = isThumb ? (isThumb2 ? ARM::t2B : ARM::tB) : ARM::B;
  // ... (branch insertion and CFG update elided)

  // Update live-in information in the new block.
  MachineRegisterInfo &MRI = MF->getRegInfo();
  for (MCPhysReg L : LRs)
    if (!MRI.isReserved(L))
      NewBB->addLiveIn(L);

  // Update internal data structures to account for the newly inserted MBB.
  MF->RenumberBlocks(NewBB);
  BBUtils->insert(NewBB->getNumber(), BasicBlockInfo());

  // Next, update WaterList.  The original block now ends with a branch, so
  // it is water; insert it (or NewBB) at the right position.
  water_iterator IP = llvm::lower_bound(WaterList, OrigBB, CompareMBBNumbers);
  MachineBasicBlock *WaterBB = *IP;
  if (WaterBB == OrigBB)
    WaterList.insert(std::next(IP), NewBB);
  else
    WaterList.insert(IP, OrigBB);
  NewWaterList.insert(OrigBB);

  // Figure out how large OrigBB and NewBB are.  As the first half of the
  // original block, OrigBB cannot contain a big block that requires
  // splitting.
  BBUtils->computeBlockSize(OrigBB);
  BBUtils->computeBlockSize(NewBB);

  // All BBOffsets following these blocks must be modified.
  BBUtils->adjustBBOffsetsAfter(OrigBB);

  return NewBB;
}
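/// getUserOffset - Compute the offset of U.MI as seen by the hardware
/// displacement computation.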
unsigned ARMConstantIslands::getUserOffset(CPUser &U) const {
  unsigned UserOffset = BBUtils->getOffsetOf(U.MI);

  // The value read from PC is offset from the actual instruction address.
  UserOffset += (isThumb ? 4 : 8);

  // On Thumb, offsets == 2 mod 4 are rounded down by the hardware for
  // purposes of the displacement computation; compensate for that here.
  if (isThumb && U.KnownAlignment)
    UserOffset &= ~3u;

  return UserOffset;
}
/// isOffsetInRange - Checks whether UserOffset (the location of a constant
/// pool reference) can reach TrialOffset within MaxDisp, allowing negative
/// displacements if NegativeOK is true.
bool ARMConstantIslands::isOffsetInRange(unsigned UserOffset,
                                         unsigned TrialOffset,
                                         unsigned MaxDisp, bool NegativeOK,
                                         bool IsSoImm) {
  if (UserOffset <= TrialOffset) {
    // User before the Trial.
    if (TrialOffset - UserOffset <= MaxDisp)
      return true;
  } else if (NegativeOK) {
    if (UserOffset - TrialOffset <= MaxDisp)
      return true;
  }
  return false;
}
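/// isWaterInRange - Returns true if a CPE placed after the specified Water
/// (a basic block) will be in range for the specific MI, compensating for
/// any unknown alignment padding in intervening blocks.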
bool ARMConstantIslands::isWaterInRange(unsigned UserOffset,
                                        MachineBasicBlock *Water, CPUser &U,
                                        unsigned &Growth) {
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  const Align CPEAlign = getCPEAlign(U.CPEMI);
  const unsigned CPEOffset = BBInfo[Water->getNumber()].postOffset(CPEAlign);
  unsigned NextBlockOffset;
  Align NextBlockAlignment;
  MachineFunction::const_iterator NextBlock = Water->getIterator();
  if (++NextBlock == MF->end()) {
    NextBlockOffset = BBInfo[Water->getNumber()].postOffset();
  } else {
    NextBlockOffset = BBInfo[NextBlock->getNumber()].Offset;
    NextBlockAlignment = NextBlock->getAlignment();
  }
  unsigned Size = U.CPEMI->getOperand(2).getImm();
  unsigned CPEEnd = CPEOffset + Size;

  // The CPE may be able to hide in the alignment padding before the next
  // block.  It may also cause more padding to be required if it is more
  // aligned than the next block.
  if (CPEEnd > NextBlockOffset) {
    Growth = CPEEnd - NextBlockOffset;
    // Compute the padding that would go at the end of the CPE to align the
    // next block.
    Growth += offsetToAlignment(CPEEnd, NextBlockAlignment);

    // If the CPE is to be inserted before the instruction, that will raise
    // the offset of the instruction.  Also account for unknown alignment
    // padding in case we have to split the block after it.
    if (CPEOffset < UserOffset)
      UserOffset += Growth + UnknownPadding(MF->getAlignment(), Log2(CPEAlign));
  } else
    // CPE fits in existing padding.
    Growth = 0;

  return isOffsetInRange(UserOffset, CPEOffset, U);
}
/// isCPEntryInRange - Returns true if the distance between specific MI and
/// specific ConstPool entry instruction can fit in MI's displacement field.
bool ARMConstantIslands::isCPEntryInRange(MachineInstr *MI, unsigned UserOffset,
                                          MachineInstr *CPEMI,
                                          unsigned MaxDisp, bool NegOk,
                                          bool DoDump) {
  unsigned CPEOffset = BBUtils->getOffsetOf(CPEMI);

  if (DoDump) {
    LLVM_DEBUG({
      unsigned Block = MI->getParent()->getNumber();
      dbgs() << "User of CPE#" << CPEMI->getOperand(0).getImm()
             << " max delta=" << MaxDisp
             << format(" insn address=%#x", UserOffset) << " in "
             << printMBBReference(*MI->getParent()) << " (#" << Block << ")\t"
             << *MI
             << format("CPE address=%#x offset=%+d: ", CPEOffset,
                       int(CPEOffset - UserOffset));
    });
  }

  return isOffsetInRange(UserOffset, CPEOffset, MaxDisp, NegOk);
}
/// decrementCPEReferenceCount - Find the constant pool entry with index CPI
/// and instruction CPEMI, and decrement its refcount.  If the refcount
/// becomes 0 remove the entry and instruction.  Returns true if we removed
/// the entry, false if we didn't.
bool ARMConstantIslands::decrementCPEReferenceCount(unsigned CPI,
                                                    MachineInstr *CPEMI) {
  // Find the old entry.  Eliminate it if it is no longer used.
  CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);
  assert(CPE && "Unexpected!");
  if (--CPE->RefCount == 0) {
    removeDeadCPEMI(CPEMI);
    CPE->CPEMI = nullptr;
    return true;
  }
  return false;
}
unsigned ARMConstantIslands::getCombinedIndex(const MachineInstr *CPEMI) {
  if (CPEMI->getOperand(1).isCPI())
    return CPEMI->getOperand(1).getIndex();

  return JumpTableEntryIndices[CPEMI->getOperand(1).getIndex()];
}
/// findInRangeCPEntry - Given the constpool user instruction, see if the
/// constant pool entry is already in range of the instruction.  Returns 1 if
/// so, 0 if not.  Returns 2 if a nonconflicting clone was found and the
/// original was replaced by it.
int ARMConstantIslands::findInRangeCPEntry(CPUser& U, unsigned UserOffset) {
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI  = U.CPEMI;

  // Check to see if the CPE is already in-range.
  if (isCPEntryInRange(UserMI, UserOffset, CPEMI, U.getMaxDisp(), U.NegOk,
                       true)) {
    LLVM_DEBUG(dbgs() << "In range\n");
    return 1;
  }

  // No.  Look for previously created clones of the CPE that are in range.
  unsigned CPI = getCombinedIndex(CPEMI);
  std::vector<CPEntry> &CPEs = CPEntries[CPI];
  for (CPEntry &CPE : CPEs) {
    // We already tried this one.
    if (CPE.CPEMI == CPEMI)
      continue;
    // Removing CPEs can leave empty entries, skip.
    if (CPE.CPEMI == nullptr)
      continue;
    if (isCPEntryInRange(UserMI, UserOffset, CPE.CPEMI, U.getMaxDisp(),
                         U.NegOk)) {
      LLVM_DEBUG(dbgs() << "Replacing CPE#" << CPI << " with CPE#" << CPE.CPI
                        << "\n");
      // Point the CPUser node to the replacement.
      U.CPEMI = CPE.CPEMI;
      // Change the CPI in the instruction operand to refer to the clone.
      for (MachineOperand &MO : UserMI->operands())
        if (MO.isCPI()) {
          MO.setIndex(CPE.CPI);
          break;
        }
      // Adjust the refcount of the clone...
      CPE.RefCount++;
      // ...and the original.  If we didn't remove the old entry, none of the
      // addresses changed, so we don't need another pass.
      return decrementCPEReferenceCount(CPI, CPEMI) ? 2 : 1;
    }
  }
  return 0;
}
/// getUnconditionalBrDisp - Returns the maximum displacement that can fit in
/// the specific unconditional branch instruction.
static unsigned getUnconditionalBrDisp(int Opc) {
  switch (Opc) {
  case ARM::tB:
    return ((1<<10)-1)*2;
  case ARM::t2B:
    return ((1<<23)-1)*2;
  default:
    break;
  }
  return ((1<<23)-1)*4;
}
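/// findAvailableWater - Look for an existing entry in the WaterList in which
/// we can place the CPE referenced from U so it's within range of U's MI.
/// Returns true if found, false if not.  If it returns true, WaterIter is
/// set to the WaterList entry.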
bool ARMConstantIslands::findAvailableWater(CPUser &U, unsigned UserOffset,
                                            water_iterator &WaterIter,
                                            bool CloserWater) {
  if (WaterList.empty())
    return false;

  unsigned BestGrowth = ~0u;

  // The nearest water without splitting the UserBB is right after it.  If
  // that distance is already more than half the maximum, splitting is
  // unlikely to help.
  MachineBasicBlock *UserBB = U.MI->getParent();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  const Align CPEAlign = getCPEAlign(U.CPEMI);
  unsigned MinNoSplitDisp = BBInfo[UserBB->getNumber()].postOffset(CPEAlign);
  if (CloserWater && MinNoSplitDisp > U.getMaxDisp() / 2)
    return false;
  for (water_iterator IP = std::prev(WaterList.end()), B = WaterList.begin();;
       --IP) {
    MachineBasicBlock *WaterBB = *IP;
    // Check if water is in range and is either at a lower address than the
    // current "high water mark" or a new water block that was created since
    // the previous iteration by inserting an unconditional branch.
    unsigned Growth;
    if (isWaterInRange(UserOffset, WaterBB, U, Growth) &&
        (WaterBB->getNumber() < U.HighWaterMark->getNumber() ||
         NewWaterList.count(WaterBB) || WaterBB == U.MI->getParent()) &&
        Growth < BestGrowth) {
      // This is the least amount of required padding seen so far.
      BestGrowth = Growth;
      WaterIter = IP;
      LLVM_DEBUG(dbgs() << "Found water after " << printMBBReference(*WaterBB)
                        << " Growth=" << Growth << '\n');

      if (CloserWater && WaterBB == U.MI->getParent())
        return true;
      // Keep looking unless it is perfect and we're not looking for the
      // lowest possible address.
      if (!CloserWater && BestGrowth == 0)
        return true;
    }
    if (IP == B)
      break;
  }
  return BestGrowth != ~0u;
}
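/// createNewWater - No existing WaterList entry will work for
/// CPUsers[CPUserIndex], so create a place to put the CPE.  The end of the
/// block is used if in range, and the conditional branch munged so control
/// flow is correct.  Otherwise the block is split to create a hole with an
/// unconditional branch around it.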
void ARMConstantIslands::createNewWater(unsigned CPUserIndex,
                                        unsigned UserOffset,
                                        MachineBasicBlock *&NewMBB) {
  CPUser &U = CPUsers[CPUserIndex];
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI  = U.CPEMI;
  const Align CPEAlign = getCPEAlign(CPEMI);
  MachineBasicBlock *UserMBB = UserMI->getParent();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  const BasicBlockInfo &UserBBI = BBInfo[UserMBB->getNumber()];

  // If the block does not end in an unconditional branch already, and if the
  // end of the block is within range, make new water there.
  if (BBHasFallthrough(UserMBB)) {
    // Size of branch to insert.
    unsigned Delta = isThumb1 ? 2 : 4;
    // Compute the offset where the CPE will begin.
    unsigned CPEOffset = UserBBI.postOffset(CPEAlign) + Delta;

    if (isOffsetInRange(UserOffset, CPEOffset, U)) {
      LLVM_DEBUG(dbgs() << "Split at end of " << printMBBReference(*UserMBB)
                        << format(", expected CPE offset %#x\n", CPEOffset));
      NewMBB = &*++UserMBB->getIterator();
      // Add an unconditional branch from UserMBB to the fallthrough block.
      // Record it for branch lengthening; if the preceding conditional
      // branch is out of range, the targets will be exchanged and the
      // altered branch may be out of range, so the machinery has to know
      // about it.
      int UncondBr = isThumb ? ((isThumb2) ? ARM::t2B : ARM::tB) : ARM::B;
      if (!isThumb)
        BuildMI(UserMBB, DebugLoc(), TII->get(UncondBr)).addMBB(NewMBB);
      else
        BuildMI(UserMBB, DebugLoc(), TII->get(UncondBr))
            .addMBB(NewMBB)
            .add(predOps(ARMCC::AL));
      unsigned MaxDisp = getUnconditionalBrDisp(UncondBr);
      ImmBranches.push_back(ImmBranch(&UserMBB->back(),
                                      MaxDisp, false, UncondBr));
      BBUtils->computeBlockSize(UserMBB);
      BBUtils->adjustBBOffsetsAfter(UserMBB);
      return;
    }
  }

  // What a big block.  Find a place within the block to split it.  Try to
  // split the block so it's fully aligned.  Compute the latest split point
  // where we can add a 4-byte branch instruction, and then align to Align,
  // which is the largest possible alignment in the function.
  const Align Align = MF->getAlignment();
  assert(Align >= CPEAlign && "Over-aligned constant pool entry");
  unsigned KnownBits = UserBBI.internalKnownBits();
  unsigned UPad = UnknownPadding(Align, KnownBits);
  unsigned BaseInsertOffset = UserOffset + U.getMaxDisp() - UPad;
  LLVM_DEBUG(dbgs() << format("Split in middle of big block before %#x",
                              BaseInsertOffset));

  // Account for the branch we will insert at the split point.
  BaseInsertOffset -= 4;

  LLVM_DEBUG(dbgs() << format(", adjusted to %#x", BaseInsertOffset)
                    << " la=" << Log2(Align) << " kb=" << KnownBits
                    << " up=" << UPad << '\n');

  // This could point off the end of the block if we've already got constant
  // pool entries following this block; only the last one is in the water
  // list.  Back past any possible branches (allow for a conditional and a
  // maximally long unconditional).
  if (BaseInsertOffset + 8 >= UserBBI.postOffset()) {
    // Ensure BaseInsertOffset is larger than the offset of the instruction
    // following UserMI so that the loop which searches for the split point
    // iterates at least once.
    BaseInsertOffset =
        std::max(UserBBI.postOffset() - UPad - 8,
                 UserOffset + TII->getInstSizeInBytes(*UserMI) + 1);
    LLVM_DEBUG(dbgs() << format("Move inside block: %#x\n", BaseInsertOffset));
  }

  // Do not choose a split point inside a run of predicated instructions
  // following the user; move the insertion point past them.
  MachineBasicBlock::iterator I = UserMI;
  ++I;
  Register PredReg;
  for (unsigned Offset = UserOffset + TII->getInstSizeInBytes(*UserMI);
       I->getOpcode() != ARM::t2IT &&
       getITInstrPredicate(*I, PredReg) != ARMCC::AL;
       Offset += TII->getInstSizeInBytes(*I), I = std::next(I)) {
    BaseInsertOffset =
        std::max(BaseInsertOffset, Offset + TII->getInstSizeInBytes(*I) + 1);
    assert(I != UserMBB->end() && "Fell off end of block");
  }

  unsigned EndInsertOffset = BaseInsertOffset + 4 + UPad +
                             CPEMI->getOperand(2).getImm();
  MachineBasicBlock::iterator MI = UserMI;
  ++MI;
  unsigned CPUIndex = CPUserIndex+1;
  unsigned NumCPUsers = CPUsers.size();
  MachineInstr *LastIT = nullptr;
  for (unsigned Offset = UserOffset + TII->getInstSizeInBytes(*UserMI);
       Offset < BaseInsertOffset;
       Offset += TII->getInstSizeInBytes(*MI), MI = std::next(MI)) {
    assert(MI != UserMBB->end() && "Fell off end of block");
    if (CPUIndex < NumCPUsers && CPUsers[CPUIndex].MI == &*MI) {
      CPUser &U = CPUsers[CPUIndex];
      if (!isOffsetInRange(Offset, EndInsertOffset, U)) {
        // Shift insertion point by one unit of alignment so it is within
        // reach.
        BaseInsertOffset -= Align.value();
        EndInsertOffset -= Align.value();
      }
      // This is overly conservative, as we don't account for CPEMIs being
      // reused within the block, but it doesn't matter much.
      EndInsertOffset += U.CPEMI->getOperand(2).getImm();
      CPUIndex++;
    }

    // Remember the last IT instruction.
    if (MI->getOpcode() == ARM::t2IT)
      LastIT = &*MI;
  }

  --MI;

  // Avoid splitting an IT block.
  if (LastIT) {
    ARMCC::CondCodes CC = getITInstrPredicate(*MI, PredReg);
    if (CC != ARMCC::AL)
      MI = LastIT;
  }

  // Avoid splitting a MOVW+MOVT pair with a relocation on Windows.  This
  // instruction pair is covered by one single IMAGE_REL_ARM_MOV32T
  // relocation; if a constant island is injected in between them, the
  // relocation will clobber the instruction and fail to update the MOVT
  // instruction.
  if (STI->isTargetWindows() && isThumb && MI->getOpcode() == ARM::t2MOVTi16 &&
      (MI->getOperand(2).getTargetFlags() & ARMII::MO_OPTION_MASK) ==
          ARMII::MO_HI16) {
    --MI;
    assert(MI->getOpcode() == ARM::t2MOVi16 &&
           (MI->getOperand(1).getTargetFlags() & ARMII::MO_OPTION_MASK) ==
               ARMII::MO_LO16);
  }

  NewMBB = splitBlockBeforeInstr(&*MI);
}
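/// handleConstantPoolUser - Analyze the specified user, checking to see if
/// it is out-of-range.  If so, pick up the constant pool value and move it
/// some place in-range.  Return true if we changed any addresses, false
/// otherwise.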
bool ARMConstantIslands::handleConstantPoolUser(unsigned CPUserIndex,
                                                bool CloserWater) {
  CPUser &U = CPUsers[CPUserIndex];
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI  = U.CPEMI;
  unsigned CPI = getCombinedIndex(CPEMI);
  unsigned Size = CPEMI->getOperand(2).getImm();
  // Compute this only once, it's expensive.
  unsigned UserOffset = getUserOffset(U);

  // See if the current entry is within range, or there is a clone of it in
  // range.
  int result = findInRangeCPEntry(U, UserOffset);
  if (result == 1)
    return false;
  else if (result == 2)
    return true;

  // No existing clone of this CPE is within range.
  // We will be generating a new clone.  Get a UID for it.
  unsigned ID = AFI->createPICLabelUId();

  // Look for water where we can place this CPE.
  MachineBasicBlock *NewIsland = MF->CreateMachineBasicBlock();
  MachineBasicBlock *NewMBB;
  water_iterator IP;
  if (findAvailableWater(U, UserOffset, IP, CloserWater)) {
    LLVM_DEBUG(dbgs() << "Found water in range\n");
    MachineBasicBlock *WaterBB = *IP;

    // If the original WaterList entry was "new water" on this iteration,
    // propagate that to the new island.  This keeps NewWaterList updated to
    // match the WaterList, which will be updated below.
    if (NewWaterList.erase(WaterBB))
      NewWaterList.insert(NewIsland);

    // The new CPE goes before the following block (NewMBB).
    NewMBB = &*++WaterBB->getIterator();
  } else {
    // No water found.
    LLVM_DEBUG(dbgs() << "No water found\n");
    createNewWater(CPUserIndex, UserOffset, NewMBB);

    // splitBlockBeforeInstr adds to WaterList, which is important when it is
    // the previous block of the new island.  Find the WaterList entry again.
    MachineBasicBlock *WaterBB = &*--NewMBB->getIterator();
    IP = find(WaterList, WaterBB);
    if (IP != WaterList.end())
      NewWaterList.erase(WaterBB);

    // We are adding new water.  Update NewWaterList.
    NewWaterList.insert(NewIsland);
  }

  // Remove the original WaterList entry; we want subsequent insertions in
  // this vicinity to go after the one we're about to insert.
  if (IP != WaterList.end())
    WaterList.erase(IP);

  // Okay, we know we can put an island before NewMBB now, do it!
  MF->insert(NewMBB->getIterator(), NewIsland);

  // Update internal data structures to account for the newly inserted MBB.
  updateForInsertedWaterBlock(NewIsland);

  // Now that we have an island to add the CPE to, clone the original CPE and
  // add it to the island.
  U.HighWaterMark = NewIsland;
  U.CPEMI = BuildMI(NewIsland, DebugLoc(), CPEMI->getDesc())
                .addImm(ID)
                .add(CPEMI->getOperand(1))
                .addImm(Size);
  CPEntries[CPI].push_back(CPEntry(U.CPEMI, ID, 1));

  // Decrement the old entry, and remove it if refcount becomes 0.
  decrementCPEReferenceCount(CPI, CPEMI);

  // Mark the basic block as aligned as required by the const-pool entry.
  NewIsland->setAlignment(getCPEAlign(U.CPEMI));

  // Increase the size of the island block to account for the new entry.
  BBUtils->adjustBBSize(NewIsland, Size);
  BBUtils->adjustBBOffsetsAfter(&*--NewIsland->getIterator());

  // Finally, change the CPI in the instruction operand to be ID.
  for (MachineOperand &MO : UserMI->operands())
    if (MO.isCPI()) {
      MO.setIndex(ID);
      break;
    }

  LLVM_DEBUG(
      dbgs() << "  Moved CPE to #" << ID << " CPI=" << CPI
             << format(" offset=%#x\n",
                       BBUtils->getBBInfo()[NewIsland->getNumber()].Offset));

  return true;
}
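/// removeDeadCPEMI - Remove a dead constant pool entry instruction.  Update
/// sizes and offsets of impacted basic blocks.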
void ARMConstantIslands::removeDeadCPEMI(MachineInstr *CPEMI) {
  MachineBasicBlock *CPEBB = CPEMI->getParent();
  unsigned Size = CPEMI->getOperand(2).getImm();
  CPEMI->eraseFromParent();
  BBUtils->adjustBBSize(CPEBB, -Size);
  // All succeeding offsets have the current size value added in, fix this.
  if (CPEBB->empty()) {
    // This block no longer needs to be aligned.
    CPEBB->setAlignment(Align(1));
  } else {
    // Entries are sorted by descending alignment, so realign from the front.
    CPEBB->setAlignment(getCPEAlign(&*CPEBB->begin()));
  }

  BBUtils->adjustBBOffsetsAfter(CPEBB);
}
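/// removeUnusedCPEntries - Remove constant pool entries whose refcounts
/// are zero.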
bool ARMConstantIslands::removeUnusedCPEntries() {
  bool MadeChange = false;
  for (std::vector<CPEntry> &CPEs : CPEntries) {
    for (CPEntry &CPE : CPEs) {
      if (CPE.RefCount == 0 && CPE.CPEMI) {
        removeDeadCPEMI(CPE.CPEMI);
        CPE.CPEMI = nullptr;
        MadeChange = true;
      }
    }
  }
  return MadeChange;
}
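/// fixupImmediateBr - Fix up an immediate branch whose destination is too
/// far away to fit in its displacement field.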
bool ARMConstantIslands::fixupImmediateBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *DestBB = MI->getOperand(0).getMBB();

  // Check to see if the DestBB is already in-range.
  if (BBUtils->isBBInRange(MI, DestBB, Br.MaxDisp))
    return false;

  if (!Br.isCond)
    return fixupUnconditionalBr(Br);
  return fixupConditionalBr(Br);
}
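/// fixupUnconditionalBr - Fix up an unconditional branch whose destination
/// is too far away to fit in its displacement field.  If LR has been spilled
/// in the epilogue, we can use BL to implement a far jump.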
bool ARMConstantIslands::fixupUnconditionalBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *MBB = MI->getParent();

  if (!AFI->isLRSpilled())
    report_fatal_error("underestimated function size");

  // Use BL to implement far jump.
  Br.MaxDisp = (1 << 21) * 2;
  MI->setDesc(TII->get(ARM::tBfar));
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  BBInfo[MBB->getNumber()].Size += 2;
  BBUtils->adjustBBOffsetsAfter(MBB);
  ++NumUBrFixed;
  return true;
}
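/// fixupConditionalBr - Fix up a conditional branch whose destination is too
/// far away to fit in its displacement field.  It is converted to an inverse
/// conditional branch + an unconditional branch to the destination.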
bool ARMConstantIslands::fixupConditionalBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *DestBB = MI->getOperand(0).getMBB();

  // Add an unconditional branch to the destination and invert the branch
  // condition to jump over it.
  ARMCC::CondCodes CC = (ARMCC::CondCodes)MI->getOperand(1).getImm();
  CC = ARMCC::getOppositeCondition(CC);
  Register CCReg = MI->getOperand(2).getReg();

  // If the branch is at the end of its MBB and that has a fall-through
  // block, direct the updated conditional branch to the fall-through block.
  // Otherwise, split the MBB before the next instruction.
  MachineBasicBlock *MBB = MI->getParent();
  MachineInstr *BMI = &MBB->back();
  bool NeedSplit = (BMI != MI) || !BBHasFallthrough(MBB);

  ++NumCBrFixed;
  if (BMI != MI) {
    if (std::next(MachineBasicBlock::iterator(MI)) == std::prev(MBB->end()) &&
        BMI->getOpcode() == Br.UncondBr) {
      // The last MI in the BB is an unconditional branch.  We can simply
      // invert the condition and swap destinations.
      MachineBasicBlock *NewDest = BMI->getOperand(0).getMBB();
      if (BBUtils->isBBInRange(MI, NewDest, Br.MaxDisp)) {
        LLVM_DEBUG(
            dbgs() << "  Invert Bcc condition and swap its destination with "
                   << *BMI);
        BMI->getOperand(0).setMBB(DestBB);
        MI->getOperand(0).setMBB(NewDest);
        MI->getOperand(1).setImm(CC);
        return true;
      }
    }
  }

  if (NeedSplit) {
    splitBlockBeforeInstr(MI);
    // No need for the branch to the next block.  We're adding an
    // unconditional branch to the destination.
    int delta = TII->getInstSizeInBytes(MBB->back());
    BBUtils->adjustBBSize(MBB, -delta);
    MBB->back().eraseFromParent();

    // The conditional successor will be swapped between the BBs after this,
    // so update the CFG.
    MBB->addSuccessor(DestBB);
    std::next(MBB->getIterator())->removeSuccessor(DestBB);
  }
  MachineBasicBlock *NextBB = &*++MBB->getIterator();

  LLVM_DEBUG(dbgs() << "  Insert B to " << printMBBReference(*DestBB)
                    << " also invert condition and change dest. to "
                    << printMBBReference(*NextBB) << "\n");

  // Insert a new conditional branch and a new unconditional branch.  Also
  // update the ImmBranch and add a new entry for the new branch.
  BuildMI(MBB, DebugLoc(), TII->get(MI->getOpcode()))
      .addMBB(NextBB)
      .addImm(CC)
      .addReg(CCReg);
  Br.MI = &MBB->back();
  BBUtils->adjustBBSize(MBB, TII->getInstSizeInBytes(MBB->back()));
  if (isThumb)
    BuildMI(MBB, DebugLoc(), TII->get(Br.UncondBr))
        .addMBB(DestBB)
        .add(predOps(ARMCC::AL));
  else
    BuildMI(MBB, DebugLoc(), TII->get(Br.UncondBr)).addMBB(DestBB);
  BBUtils->adjustBBSize(MBB, TII->getInstSizeInBytes(MBB->back()));
  unsigned MaxDisp = getUnconditionalBrDisp(Br.UncondBr);
  ImmBranches.push_back(ImmBranch(&MBB->back(), MaxDisp, false, Br.UncondBr));

  // Remove the old conditional branch.  It may or may not still be in MBB.
  BBUtils->adjustBBSize(MI->getParent(), -TII->getInstSizeInBytes(*MI));
  MI->eraseFromParent();
  BBUtils->adjustBBOffsetsAfter(MBB);
  return true;
}
bool ARMConstantIslands::optimizeThumb2Instructions() {
  bool MadeChange = false;

  // Shrink ADR and LDR from constantpool.
  for (CPUser &U : CPUsers) {
    unsigned Opcode = U.MI->getOpcode();
    unsigned NewOpc = 0;
    unsigned Scale = 1;
    unsigned Bits = 0;
    switch (Opcode) {
    default: break;
    case ARM::t2LEApcrel:
      if (isARMLowRegister(U.MI->getOperand(0).getReg())) {
        NewOpc = ARM::tLEApcrel;
        Bits = 8;
        Scale = 4;
      }
      break;
    case ARM::t2LDRpci:
      if (isARMLowRegister(U.MI->getOperand(0).getReg())) {
        NewOpc = ARM::tLDRpci;
        Bits = 8;
        Scale = 4;
      }
      break;
    }

    if (!NewOpc)
      continue;

    unsigned UserOffset = getUserOffset(U);
    unsigned MaxOffs = ((1 << Bits) - 1) * Scale;

    // Be conservative with inline asm.
    if (!U.KnownAlignment)
      MaxOffs -= 2;

    // FIXME: Check if offset is multiple of scale if scale is not 4.
    if (isCPEntryInRange(U.MI, UserOffset, U.CPEMI, MaxOffs, false, true)) {
      LLVM_DEBUG(dbgs() << "Shrink: " << *U.MI);
      U.MI->setDesc(TII->get(NewOpc));
      MachineBasicBlock *MBB = U.MI->getParent();
      BBUtils->adjustBBSize(MBB, -2);
      BBUtils->adjustBBOffsetsAfter(MBB);
      ++NumT2CPShrunk;
      MadeChange = true;
    }
  }

  return MadeChange;
}
bool ARMConstantIslands::optimizeThumb2Branches() {

  auto TryShrinkBranch = [this](ImmBranch &Br) {
    unsigned Opcode = Br.MI->getOpcode();
    unsigned NewOpc = 0;
    unsigned Scale = 1;
    unsigned Bits = 0;
    switch (Opcode) {
    default: break;
    case ARM::t2B:
      NewOpc = ARM::tB;
      Bits = 11;
      Scale = 2;
      break;
    case ARM::t2Bcc:
      NewOpc = ARM::tBcc;
      Bits = 8;
      Scale = 2;
      break;
    }
    if (NewOpc) {
      unsigned MaxOffs = ((1 << (Bits-1))-1) * Scale;
      MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
      if (BBUtils->isBBInRange(Br.MI, DestBB, MaxOffs)) {
        LLVM_DEBUG(dbgs() << "Shrink branch: " << *Br.MI);
        Br.MI->setDesc(TII->get(NewOpc));
        MachineBasicBlock *MBB = Br.MI->getParent();
        BBUtils->adjustBBSize(MBB, -2);
        BBUtils->adjustBBOffsetsAfter(MBB);
        ++NumT2BrShrunk;
        return true;
      }
    }
    return false;
  };

  struct ImmCompare {
    MachineInstr* MI = nullptr;
    unsigned NewOpc = 0;
  };

  auto FindCmpForCBZ = [this](ImmBranch &Br, ImmCompare &ImmCmp,
                              MachineBasicBlock *DestBB) {
    ImmCmp.MI = nullptr;
    ImmCmp.NewOpc = 0;

    // If the conditional branch doesn't kill CPSR, then CPSR can be liveout,
    // so this transformation is not safe.
    if (!Br.MI->killsRegister(ARM::CPSR))
      return false;

    Register PredReg;
    unsigned NewOpc = 0;
    ARMCC::CondCodes Pred = getInstrPredicate(*Br.MI, PredReg);
    if (Pred == ARMCC::EQ)
      NewOpc = ARM::tCBZ;
    else if (Pred == ARMCC::NE)
      NewOpc = ARM::tCBNZ;
    else
      return false;

    // Check if the distance is within 126.  Subtract 2 from the starting
    // offset because the cmp will be eliminated.
    unsigned BrOffset = BBUtils->getOffsetOf(Br.MI) + 4 - 2;
    BBInfoVector &BBInfo = BBUtils->getBBInfo();
    unsigned DestOffset = BBInfo[DestBB->getNumber()].Offset;
    if (BrOffset >= DestOffset || (DestOffset - BrOffset) > 126)
      return false;

    // Search backwards to find a tCMPi8.
    auto *TRI = STI->getRegisterInfo();
    MachineInstr *CmpMI = findCMPToFoldIntoCBZ(Br.MI, TRI);
    if (!CmpMI || CmpMI->getOpcode() != ARM::tCMPi8)
      return false;

    ImmCmp.MI = CmpMI;
    ImmCmp.NewOpc = NewOpc;
    return true;
  };

  auto TryConvertToLE = [this](ImmBranch &Br, ImmCompare &Cmp) {
    if (Br.MI->getOpcode() != ARM::t2Bcc || !STI->hasLOB() ||
        STI->hasMinSize())
      return false;

    MachineBasicBlock *MBB = Br.MI->getParent();
    MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
    if (BBUtils->getOffsetOf(MBB) < BBUtils->getOffsetOf(DestBB) ||
        !BBUtils->isBBInRange(Br.MI, DestBB, 4094))
      return false;

    if (!DT->dominates(DestBB, MBB))
      return false;

    // We queried for the CBN?Z opcode based upon the 'ExitBB', the opposite
    // target of Br.  So now we need to reverse the condition.
    Cmp.NewOpc = Cmp.NewOpc == ARM::tCBZ ? ARM::tCBNZ : ARM::tCBZ;

    MachineInstrBuilder MIB = BuildMI(*MBB, Br.MI, Br.MI->getDebugLoc(),
                                      TII->get(ARM::t2LE));
    // Swapped a t2Bcc for a t2LE, so no need to update the size of the
    // block.
    MIB.add(Br.MI->getOperand(0));
    Br.MI->eraseFromParent();
    Br.MI = MIB;
    ++NumLEInserted;
    return true;
  };

  bool MadeChange = false;

  // The order in which branches appear in ImmBranches is approximately their
  // order within the function body.  By visiting later branches first, we
  // reduce the distance between earlier forward branches and their targets.
  for (ImmBranch &Br : reverse(ImmBranches)) {
    MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
    MachineBasicBlock *MBB = Br.MI->getParent();
    MachineBasicBlock *ExitBB = &MBB->back() == Br.MI ?
        MBB->getFallThrough() :
        MBB->back().getOperand(0).getMBB();

    ImmCompare Cmp;
    if (FindCmpForCBZ(Br, Cmp, ExitBB) && TryConvertToLE(Br, Cmp)) {
      DestBB = ExitBB;
      MadeChange = true;
    } else {
      FindCmpForCBZ(Br, Cmp, DestBB);
      MadeChange |= TryShrinkBranch(Br);
    }

    unsigned Opcode = Br.MI->getOpcode();
    if ((Opcode != ARM::tBcc && Opcode != ARM::t2LE) || !Cmp.NewOpc)
      continue;

    Register Reg = Cmp.MI->getOperand(0).getReg();

    // Check for kill flags on Reg.  If they are present, remove them and set
    // kill on the new CBZ.
    auto *TRI = STI->getRegisterInfo();
    MachineBasicBlock::iterator KillMI = Br.MI;
    bool RegKilled = false;
    do {
      --KillMI;
      if (KillMI->killsRegister(Reg, TRI)) {
        KillMI->clearRegisterKills(Reg, TRI);
        RegKilled = true;
        break;
      }
    } while (KillMI != Cmp.MI);

    // Create the new CBZ/CBNZ.
    LLVM_DEBUG(dbgs() << "Fold: " << *Cmp.MI << " and: " << *Br.MI);
    MachineInstr *NewBR =
        BuildMI(*MBB, Br.MI, Br.MI->getDebugLoc(), TII->get(Cmp.NewOpc))
            .addReg(Reg, getKillRegState(RegKilled))
            .addMBB(DestBB, Br.MI->getOperand(0).getTargetFlags());

    Cmp.MI->eraseFromParent();

    if (Br.MI->getOpcode() == ARM::tBcc) {
      Br.MI->eraseFromParent();
      Br.MI = NewBR;
      BBUtils->adjustBBSize(MBB, -2);
    } else if (MBB->back().getOpcode() != ARM::t2LE) {
      // The logic has been reversed: the CBN?Z is now the conditional branch
      // and the LE the unconditional one, so the old unconditional branch at
      // the end of the block is redundant.
      MachineInstr *LastMI = &MBB->back();
      BBUtils->adjustBBSize(MBB, -TII->getInstSizeInBytes(*LastMI));
      LastMI->eraseFromParent();
    }
    BBUtils->adjustBBOffsetsAfter(MBB);
    MadeChange = true;
  }

  return MadeChange;
}
static bool isSimpleIndexCalc(MachineInstr &I, unsigned EntryReg,
                              unsigned BaseReg) {
  if (I.getOpcode() != ARM::t2ADDrs)
    return false;

  if (I.getOperand(0).getReg() != EntryReg)
    return false;

  if (I.getOperand(1).getReg() != BaseReg)
    return false;

  // FIXME: what about CC and IdxReg?
  return true;
}
bool ARMConstantIslands::preserveBaseRegister(MachineInstr *JumpMI,
                                              MachineInstr *LEAMI,
                                              unsigned &DeadSize,
                                              bool &CanDeleteLEA,
                                              bool &BaseRegKill) {
  Register EntryReg = JumpMI->getOperand(0).getReg();
  Register BaseReg = LEAMI->getOperand(0).getReg();

  CanDeleteLEA = true;
  BaseRegKill = false;
  MachineInstr *RemovableAdd = nullptr;
  MachineBasicBlock::iterator I(LEAMI);
  for (++I; &*I != JumpMI; ++I) {
    if (isSimpleIndexCalc(*I, EntryReg, BaseReg)) {
      RemovableAdd = &*I;
      break;
    }

    for (const MachineOperand &MO : I->operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;
      if (MO.isDef() && MO.getReg() == BaseReg)
        return false;
      if (MO.isUse() && MO.getReg() == BaseReg) {
        BaseRegKill = BaseRegKill || MO.isKill();
        CanDeleteLEA = false;
      }
    }
  }

  if (!RemovableAdd)
    return true;

  // Check the add really is removable, and that nothing else in the block
  // clobbers BaseReg.
  for (++I; &*I != JumpMI; ++I) {
    for (const MachineOperand &MO : I->operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;
      if (MO.isDef() && MO.getReg() == BaseReg)
        return false;
      if (MO.isUse() && MO.getReg() == EntryReg)
        RemovableAdd = nullptr;
    }
  }

  if (RemovableAdd) {
    RemovableAdd->eraseFromParent();
    DeadSize += isThumb2 ? 4 : 2;
  } else if (BaseReg == EntryReg) {
    // The add wasn't removable, but clobbered the base for the TBB, so we
    // can't get rid of the LEA.
    return false;
  }

  return true;
}
static void RemoveDeadAddBetweenLEAAndJT(MachineInstr *LEAMI,
                                         MachineInstr *JumpMI,
                                         unsigned &DeadSize) {
  // Remove a dead add between the LEA and JT, which used to compute
  // EntryReg, but is now dead.
  Register EntryReg = JumpMI->getOperand(0).getReg();
  MachineInstr *RemovableAdd = nullptr;
  MachineBasicBlock::iterator I(LEAMI);
  for (++I; &*I != JumpMI; ++I) {
    if (I->getOpcode() == ARM::t2ADDrs &&
        I->getOperand(0).getReg() == EntryReg) {
      RemovableAdd = &*I;
      break;
    }
  }

  if (!RemovableAdd)
    return;

  // Ensure EntryReg is not clobbered or used between the add and the jump.
  MachineBasicBlock::iterator J(RemovableAdd);
  for (++J; &*J != JumpMI; ++J) {
    for (const MachineOperand &MO : J->operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;
      if (MO.isDef() && MO.getReg() == EntryReg)
        return;
      if (MO.isUse() && MO.getReg() == EntryReg)
        return;
    }
  }

  RemovableAdd->eraseFromParent();
  DeadSize += 4;
}
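/// optimizeThumb2JumpTables - Use tbb / tbh instructions to generate smaller
/// jump tables.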
bool ARMConstantIslands::optimizeThumb2JumpTables() {
  bool MadeChange = false;

  // FIXME: After the tables are shrunk, can we get rid some of the
  // constantpool tables?
  MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
  if (!MJTI)
    return false;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  for (unsigned i = 0, e = T2JumpTables.size(); i != e; ++i) {
    MachineInstr *MI = T2JumpTables[i];
    const MCInstrDesc &MCID = MI->getDesc();
    unsigned NumOps = MCID.getNumOperands();
    unsigned JTOpIdx = NumOps - (MI->isPredicable() ? 2 : 1);
    MachineOperand JTOP = MI->getOperand(JTOpIdx);
    unsigned JTI = JTOP.getIndex();
    assert(JTI < JT.size());

    bool ByteOk = true;
    bool HalfWordOk = true;
    unsigned JTOffset = BBUtils->getOffsetOf(MI) + 4;
    const std::vector<MachineBasicBlock*> &JTBBs = JT[JTI].MBBs;
    BBInfoVector &BBInfo = BBUtils->getBBInfo();
    for (MachineBasicBlock *MBB : JTBBs) {
      unsigned DstOffset = BBInfo[MBB->getNumber()].Offset;
      // A negative offset is not ok.  FIXME: We should change BB layout to
      // make sure all the branches are forward.
      if (ByteOk && (DstOffset - JTOffset) > ((1<<8)-1)*2)
        ByteOk = false;
      unsigned TBHLimit = ((1<<16)-1)*2;
      if (HalfWordOk && (DstOffset - JTOffset) > TBHLimit)
        HalfWordOk = false;
      if (!ByteOk && !HalfWordOk)
        break;
    }

    if (!ByteOk && !HalfWordOk)
      continue;

    CPUser &User = CPUsers[JumpTableUserIndices[JTI]];
    MachineBasicBlock *MBB = MI->getParent();
    if (!MI->getOperand(0).isKill()) // FIXME: needed now?
      continue;

    unsigned DeadSize = 0;
    bool CanDeleteLEA = false;
    bool BaseRegKill = false;

    unsigned IdxReg = ~0U;
    bool IdxRegKill = true;
    if (isThumb2) {
      IdxReg = MI->getOperand(1).getReg();
      IdxRegKill = MI->getOperand(1).isKill();

      bool PreservedBaseReg =
          preserveBaseRegister(MI, User.MI, DeadSize, CanDeleteLEA,
                               BaseRegKill);
      if (!jumpTableFollowsTB(MI, User.CPEMI) && !PreservedBaseReg)
        continue;
    } else {
      // We're in thumb-1 mode, so we must have something like:
      //   %idx = tLSLri %idx, 2
      //   %base = tLEApcrelJT
      //   %t = tLDRr %base, %idx
      Register BaseReg = User.MI->getOperand(0).getReg();

      MachineBasicBlock *UserMBB = User.MI->getParent();
      MachineBasicBlock::iterator Shift = User.MI->getIterator();
      if (Shift == UserMBB->begin())
        continue;

      Shift = prev_nodbg(Shift, UserMBB->begin());
      if (Shift->getOpcode() != ARM::tLSLri ||
          Shift->getOperand(3).getImm() != 2 ||
          !Shift->getOperand(2).isKill())
        continue;
      IdxReg = Shift->getOperand(2).getReg();
      Register ShiftedIdxReg = Shift->getOperand(0).getReg();

      // It's important that IdxReg is live until the actual TBB/TBH.  Most
      // of the range is checked later, but the jump instruction itself is
      // not checked.
      auto *TRI = STI->getRegisterInfo();

      MachineBasicBlock::iterator Load = MI->getIterator();
      Load = prev_nodbg(Load, UserMBB->begin());
      if (Load->getOpcode() != ARM::tLDRr)
        continue;
      if (Load->getOperand(1).getReg() != BaseReg ||
          Load->getOperand(2).getReg() != ShiftedIdxReg ||
          !Load->getOperand(2).isKill())
        continue;

      // If we're in PIC mode, there should be another ADD following.
      if (isPositionIndependentOrROPI) {
        MachineInstr *Add = Load->getNextNode();
        if (Add->getOpcode() != ARM::tADDrr ||
            Add->getOperand(2).getReg() != BaseReg ||
            Add->getOperand(3).getReg() != Load->getOperand(0).getReg() ||
            !Add->getOperand(3).isKill())
          continue;
        if (Add->getOperand(0).getReg() != MI->getOperand(0).getReg())
          continue;
        if (registerDefinedBetween(IdxReg, Add->getNextNode(), MI, TRI))
          // IdxReg gets redefined in the middle of the sequence.
          continue;
        Add->eraseFromParent();
        DeadSize += 2;
      } else {
        if (Load->getOperand(0).getReg() != MI->getOperand(0).getReg())
          continue;
        if (registerDefinedBetween(IdxReg, Load->getNextNode(), MI, TRI))
          continue;
      }

      // Now safe to delete the load and lsl.  The LEA will be removed later.
      CanDeleteLEA = true;
      Shift->eraseFromParent();
      Load->eraseFromParent();
      DeadSize += 4;
    }

    LLVM_DEBUG(dbgs() << "Shrink JT: " << *MI);
    MachineInstr *CPEMI = User.CPEMI;
    unsigned Opc = ByteOk ? ARM::t2TBB_JT : ARM::t2TBH_JT;
    if (!isThumb2)
      Opc = ByteOk ? ARM::tTBB_JT : ARM::tTBH_JT;

    MachineBasicBlock::iterator MI_JT = MI;
    MachineInstr *NewJTMI =
        BuildMI(*MBB, MI_JT, MI->getDebugLoc(), TII->get(Opc))
            .addReg(User.MI->getOperand(0).getReg(),
                    getKillRegState(BaseRegKill))
            .addReg(IdxReg, getKillRegState(IdxRegKill))
            .addJumpTableIndex(JTI, JTOP.getTargetFlags())
            .addImm(CPEMI->getOperand(0).getImm());

    unsigned JTOpc = ByteOk ? ARM::JUMPTABLE_TBB : ARM::JUMPTABLE_TBH;
    CPEMI->setDesc(TII->get(JTOpc));

    if (jumpTableFollowsTB(MI, User.CPEMI)) {
      NewJTMI->getOperand(0).setReg(ARM::PC);
      NewJTMI->getOperand(0).setIsKill(false);

      if (CanDeleteLEA) {
        if (isThumb2)
          RemoveDeadAddBetweenLEAAndJT(User.MI, MI, DeadSize);

        User.MI->eraseFromParent();
        DeadSize += isThumb2 ? 4 : 2;

        // The LEA was eliminated; the TBB instruction becomes the only new
        // user of the jump table.
        User.MI = NewJTMI;
        User.MaxDisp = 4;
        User.NegOk = false;
        User.IsSoImm = false;
        User.KnownAlignment = false;
      } else {
        // The LEA couldn't be eliminated, so we must add another CPUser to
        // record the TBB or TBH use.
        int CPEntryIdx = JumpTableEntryIndices[JTI];
        auto &CPEs = CPEntries[CPEntryIdx];
        auto Entry =
            find_if(CPEs, [&](CPEntry &E) { return E.CPEMI == User.CPEMI; });
        ++Entry->RefCount;
        CPUsers.emplace_back(CPUser(NewJTMI, User.CPEMI, 4, false, false));
      }
    }

    unsigned NewSize = TII->getInstSizeInBytes(*NewJTMI);
    unsigned OrigSize = TII->getInstSizeInBytes(*MI);
    MI->eraseFromParent();

    int Delta = OrigSize - NewSize + DeadSize;
    BBUtils->adjustBBSize(MBB, -Delta);
    BBUtils->adjustBBOffsetsAfter(MBB);

    ++NumTBs;
    MadeChange = true;
  }

  return MadeChange;
}
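/// reorderThumb2JumpTables - Adjust the function's block layout to ensure
/// that jump tables branch forwards, since that's what tbb and tbh need.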
bool ARMConstantIslands::reorderThumb2JumpTables() {
  bool MadeChange = false;

  MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
  if (!MJTI)
    return false;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  for (unsigned i = 0, e = T2JumpTables.size(); i != e; ++i) {
    MachineInstr *MI = T2JumpTables[i];
    const MCInstrDesc &MCID = MI->getDesc();
    unsigned NumOps = MCID.getNumOperands();
    unsigned JTOpIdx = NumOps - (MI->isPredicable() ? 2 : 1);
    MachineOperand JTOP = MI->getOperand(JTOpIdx);
    unsigned JTI = JTOP.getIndex();
    assert(JTI < JT.size());

    // We prefer if target blocks for the jump table come after the jump
    // instruction so we can use TB[BH].  Loop through the target blocks and
    // try to adjust them such that that's true.
    int JTNumber = MI->getParent()->getNumber();
    const std::vector<MachineBasicBlock*> &JTBBs = JT[JTI].MBBs;
    for (MachineBasicBlock *MBB : JTBBs) {
      int DTNumber = MBB->getNumber();

      if (DTNumber < JTNumber) {
        // The destination precedes the switch.  Try to move the block
        // forward so we have a positive offset.
        MachineBasicBlock *NewBB =
            adjustJTTargetBlockForward(JTI, MBB, MI->getParent());
        if (NewBB)
          MJTI->ReplaceMBBInJumpTable(JTI, MBB, NewBB);
        MadeChange = true;
      }
    }
  }

  return MadeChange;
}
MachineBasicBlock *ARMConstantIslands::adjustJTTargetBlockForward(
    unsigned JTI, MachineBasicBlock *BB, MachineBasicBlock *JTBB) {
  // If the destination block is terminated by an unconditional branch, try
  // to move it; otherwise, create a new block following the jump table that
  // branches back to the actual target.
  MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
  SmallVector<MachineOperand, 4> Cond;
  SmallVector<MachineOperand, 4> CondPrior;
  MachineFunction::iterator BBi = BB->getIterator();
  MachineFunction::iterator OldPrior = std::prev(BBi);

  // If the block terminator isn't analyzable, don't try to move the block.
  bool B = TII->analyzeBranch(*BB, TBB, FBB, Cond);

  // If the block ends in an unconditional branch, move it.  The prior block
  // has to have an analyzable terminator for us to move this one.  Be
  // paranoid and make sure we're not trying to move the entry block of the
  // function.
  if (!B && Cond.empty() && BB != &MF->front() &&
      !TII->analyzeBranch(*OldPrior, TBB, FBB, CondPrior)) {
    BB->moveAfter(JTBB);
    OldPrior->updateTerminator(BB);
    // Update numbering to account for the block being moved.
    MF->RenumberBlocks();
    ++NumJTMoved;
    return nullptr;
  }

  // Create a new MBB for the code after the jump BB.
  MachineBasicBlock *NewBB =
      MF->CreateMachineBasicBlock(JTBB->getBasicBlock());
  MachineFunction::iterator MBBI = ++JTBB->getIterator();
  MF->insert(MBBI, NewBB);

  // Add an unconditional branch from NewBB to BB.
  // ... (branch insertion and live-in copying elided)

  // Update internal data structures to account for the newly inserted MBB.
  MF->RenumberBlocks(NewBB);

  // Update the CFG.
  NewBB->addSuccessor(BB);
  JTBB->replaceSuccessor(BB, NewBB);

  ++NumJTInserted;
  return NewBB;
}
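/// createARMConstantIslandPass - returns an instance of the constpool
/// island pass.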
FunctionPass *llvm::createARMConstantIslandPass() {
  return new ARMConstantIslands();
}