#include "llvm/Config/llvm-config.h"

#define DEBUG_TYPE "arm-cp-islands"

#define ARM_CP_ISLANDS_OPT_NAME \
  "ARM constant island placement and branch shortening pass"
STATISTIC(NumSplit,      "Number of uncond branches inserted");
STATISTIC(NumCBrFixed,   "Number of cond branches fixed");
STATISTIC(NumUBrFixed,   "Number of uncond branches fixed");
STATISTIC(NumTBs,        "Number of table branches generated");
STATISTIC(NumT2CPShrunk, "Number of Thumb2 constantpool instructions shrunk");
STATISTIC(NumT2BrShrunk, "Number of Thumb2 immediate branches shrunk");
STATISTIC(NumJTMoved,    "Number of jump table destination blocks moved");
STATISTIC(NumJTInserted, "Number of jump table intermediate blocks inserted");
STATISTIC(NumLEInserted, "Number of LE backwards branches inserted");
static cl::opt<bool>
AdjustJumpTableBlocks("arm-adjust-jump-tables", cl::Hidden, cl::init(true),
          cl::desc("Adjust basic block layout to better use TB[BH]"));

static cl::opt<unsigned>
CPMaxIteration("arm-constant-island-max-iteration", cl::Hidden, cl::init(30),
          cl::desc("The max number of iteration for converge"));

static cl::opt<bool> SynthesizeThumb1TBB(
    "arm-synthesize-thumb-1-tbb", cl::Hidden, cl::init(true),
    cl::desc("Use compressed jump tables in Thumb-1 by synthesizing an "
             "equivalent to the TBB/TBH instructions"));
    std::unique_ptr<ARMBasicBlockUtils> BBUtils = nullptr;

    std::vector<MachineBasicBlock*> WaterList;

    using water_iterator = std::vector<MachineBasicBlock *>::iterator;

      bool KnownAlignment = false;
      CPUser(MachineInstr *mi, MachineInstr *cpemi, unsigned maxdisp,
             bool neg, bool soimm)
        : MI(mi), CPEMI(cpemi), MaxDisp(maxdisp), NegOk(neg), IsSoImm(soimm) {
        HighWaterMark = CPEMI->getParent();
      }
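      // getMaxDisp returns the instruction's maximum displacement corrected
      // for unknown alignment: two bytes of slack are reserved when the user
      // offset is not known to be aligned, and two more are always subtracted
      // to absorb worst-case padding around the entry.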
      unsigned getMaxDisp() const {
        return (KnownAlignment ? MaxDisp : MaxDisp - 2) - 2;
      }
    std::vector<CPUser> CPUsers;

      CPEntry(MachineInstr *cpemi, unsigned cpi, unsigned rc = 0)
        : CPEMI(cpemi), CPI(cpi), RefCount(rc) {}

    std::vector<std::vector<CPEntry>> CPEntries;
      unsigned MaxDisp : 31;

      ImmBranch(MachineInstr *mi, unsigned maxdisp, bool cond, unsigned ubr)
        : MI(mi), MaxDisp(maxdisp), isCond(cond), UncondBr(ubr) {}

    std::vector<ImmBranch> ImmBranches;

    bool isPositionIndependentOrROPI;

    MachineFunctionProperties getRequiredProperties() const override {
      return MachineFunctionProperties().set(
          MachineFunctionProperties::Property::NoVRegs);
    }
    void doInitialConstPlacement(std::vector<MachineInstr *> &CPEMIs);
    void doInitialJumpTablePlacement(std::vector<MachineInstr *> &CPEMIs);
    CPEntry *findConstPoolEntry(unsigned CPI, const MachineInstr *CPEMI);
    void scanFunctionJumpTables();
    void initializeFunctionInfo(const std::vector<MachineInstr*> &CPEMIs);
    bool decrementCPEReferenceCount(unsigned CPI, MachineInstr *CPEMI);
    int findInRangeCPEntry(CPUser &U, unsigned UserOffset);
    bool findAvailableWater(CPUser &U, unsigned UserOffset,
                            water_iterator &WaterIter, bool CloserWater);
    void createNewWater(unsigned CPUserIndex, unsigned UserOffset,
                        MachineBasicBlock *&NewMBB);
    bool handleConstantPoolUser(unsigned CPUserIndex, bool CloserWater);
    bool removeUnusedCPEntries();
    bool isCPEntryInRange(MachineInstr *MI, unsigned UserOffset,
                          MachineInstr *CPEMI, unsigned Disp, bool NegOk,
                          bool DoDump = false);
    bool isWaterInRange(unsigned UserOffset, MachineBasicBlock *Water,
                        CPUser &U, unsigned &Growth);
    bool fixupImmediateBr(ImmBranch &Br);
    bool fixupConditionalBr(ImmBranch &Br);
    bool fixupUnconditionalBr(ImmBranch &Br);
    bool optimizeThumb2Instructions();
    bool optimizeThumb2Branches();
    bool reorderThumb2JumpTables();
    bool preserveBaseRegister(MachineInstr *JumpMI, MachineInstr *LEAMI,
                              unsigned &DeadSize, bool &CanDeleteLEA,
                              bool &BaseRegKill);
    bool optimizeThumb2JumpTables();
    unsigned getUserOffset(CPUser &) const;

    bool isOffsetInRange(unsigned UserOffset, unsigned TrialOffset,
                         unsigned Disp, bool NegativeOK,
                         bool IsSoImm = false);
    bool isOffsetInRange(unsigned UserOffset, unsigned TrialOffset,
                         const CPUser &U) {
      return isOffsetInRange(UserOffset, TrialOffset,
                             U.getMaxDisp(), U.NegOk, U.IsSoImm);
    }
char ARMConstantIslands::ID = 0;

/// verify - check BBOffsets, BBSizes, alignment of islands
void ARMConstantIslands::verify() {
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  assert(is_sorted(*MF, [&BBInfo](const MachineBasicBlock &LHS,
                                  const MachineBasicBlock &RHS) {
    return BBInfo[LHS.getNumber()].postOffset() <
           BBInfo[RHS.getNumber()].postOffset();
  }));
  LLVM_DEBUG(dbgs() << "Verifying " << CPUsers.size() << " CP users.\n");
  for (CPUser &U : CPUsers) {
    unsigned UserOffset = getUserOffset(U);
    if (isCPEntryInRange(U.MI, UserOffset, U.CPEMI, U.getMaxDisp() + 2,
                         U.NegOk,
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  for (unsigned J = 0, E = BBInfo.size(); J != E; ++J) {
    const BasicBlockInfo &BBI = BBInfo[J];
    dbgs() << format("%08x %bb.%u\t", BBI.Offset, J)
           << " kb=" << unsigned(BBI.KnownBits)
           << " ua=" << unsigned(BBI.Unalign) << " pa=" << Log2(BBI.PostAlign)
           << format(" size=%#x\n", BBInfo[J].Size);
  }
#endif
  const Align Alignment = TLI->getPrefLoopAlignment();

  bool Changed = false;
  bool PrevCanFallthough = true;
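  // Blocks that cannot be reached by falling through get the preferred loop
  // alignment: their predecessors always branch to them, so the inserted
  // padding bytes are never executed.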
  for (auto &MBB : *MF) {
    if (!PrevCanFallthough) {
      Changed = true;
      MBB.setAlignment(Alignment);
    }
      if (MI.getOpcode() == ARM::t2B &&
          MI.getOperand(0).getMBB() == MBB.getNextNode())
        continue;
      if (isLoopStart(MI) || MI.getOpcode() == ARM::t2LoopEnd ||
          MI.getOpcode() == ARM::t2LoopEndDec) {
        PrevCanFallthough = true;
        break;
      }
  BBUtils = std::make_unique<ARMBasicBlockUtils>(mf);

  LLVM_DEBUG(dbgs() << "***** ARMConstantIslands: "
                    << MCP->getConstants().size() << " CP entries, aligned to "
                    << MCP->getConstantPoolAlign().value() << " bytes *****\n");

  TII = STI->getInstrInfo();
  isPositionIndependentOrROPI =
      STI->getTargetLowering()->isPositionIndependent() || STI->isROPI();
  DT = &getAnalysis<MachineDominatorTreeWrapperPass>().getDomTree();

  isThumb = AFI->isThumbFunction();
  isThumb1 = AFI->isThumb1OnlyFunction();
  isThumb2 = AFI->isThumb2Function();

  // TBB generation code in this constant island pass has not been adapted to
  // deal with speculation barriers.
  if (STI->hardenSlsRetBr())
    GenerateTBB = false;

  // Renumber all of the machine basic blocks in the function, guaranteeing
  // that the numbers agree with the position of the block in the function.
  MF->RenumberBlocks();
  bool MadeChange = false;

  if (GenerateTBB && AdjustJumpTableBlocks) {
    scanFunctionJumpTables();
    MadeChange |= reorderThumb2JumpTables();
    // Data is out of date, so clear it. It'll be recomputed later.
    T2JumpTables.clear();
    // Blocks may have shifted around. Keep the numbering up to date.
    MF->RenumberBlocks();
  }
  // Perform the initial placement of the constant pool entries. To start
  // with, we put them all at the end of the function.
  std::vector<MachineInstr*> CPEMIs;
  if (!MCP->isEmpty())
    doInitialConstPlacement(CPEMIs);

  if (MF->getJumpTableInfo())
    doInitialJumpTablePlacement(CPEMIs);

  // The next UID to take is the first unused one.
  AFI->initPICLabelUId(CPEMIs.size());

  // Do the initial scan of the function, building up information about the
  // sizes of each block, the location of all the water, and finding all of
  // the constant pool users.
  initializeFunctionInfo(CPEMIs);

  // Functions with jump tables need an alignment of 4 because they use the
  // ADR instruction, which aligns the PC to 4 bytes before adding an offset.
  if (!T2JumpTables.empty())
    MF->ensureAlignment(Align(4));

  // Remove dead constant pool entries.
  MadeChange |= removeUnusedCPEntries();
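  // Iteratively place constant pool entries and fix up branches until there
  // is no change.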
  unsigned NoCPIters = 0, NoBRIters = 0;
  while (true) {
    LLVM_DEBUG(dbgs() << "Beginning CP iteration #" << NoCPIters << '\n');
    bool CPChange = false;
    for (unsigned i = 0, e = CPUsers.size(); i != e; ++i)
      CPChange |= handleConstantPoolUser(i, NoCPIters >= CPMaxIteration / 2);
    if (CPChange && ++NoCPIters > CPMaxIteration)
      report_fatal_error("Constant Island pass failed to converge!");

    // Clear NewWaterList now. If we split a block for branches, it should
    // appear as "new water" for the next iteration of constant pool placement.
    NewWaterList.clear();

    LLVM_DEBUG(dbgs() << "Beginning BR iteration #" << NoBRIters << '\n');
    bool BRChange = false;
    for (unsigned i = 0, e = ImmBranches.size(); i != e; ++i)
      BRChange |= fixupImmediateBr(ImmBranches[i]);
    if (BRChange && ++NoBRIters > 30)
      report_fatal_error("Branch Fix Up pass failed to converge!");

    if (!CPChange && !BRChange)
      break;
    MadeChange = true;
  }
  // Shrink 32-bit Thumb2 load and store instructions.
  if (isThumb2 && !STI->prefers32BitThumb())
    MadeChange |= optimizeThumb2Instructions();

  // Shrink 32-bit branch instructions.
  if (isThumb && STI->hasV8MBaselineOps())
    MadeChange |= optimizeThumb2Branches();

  // Optimize jump tables using TBB / TBH.
  if (GenerateTBB && !STI->genExecuteOnly())
    MadeChange |= optimizeThumb2JumpTables();
  // Save the mapping between original and cloned constpool entries.
  for (unsigned i = 0, e = CPEntries.size(); i != e; ++i) {
    for (unsigned j = 0, je = CPEntries[i].size(); j != je; ++j) {
      const CPEntry &CPE = CPEntries[i][j];
      if (CPE.CPEMI && CPE.CPEMI->getOperand(1).isCPI())
        AFI->recordCPEClone(i, CPE.CPI);
    }
  }
  JumpTableEntryIndices.clear();
  JumpTableUserIndices.clear();
  BlockJumpTableRefCount.clear();
  T2JumpTables.clear();
/// doInitialConstPlacement - Perform the initial placement of the regular
/// constant pool entries. To start with, we put them all at the end of the
/// function.
void
ARMConstantIslands::doInitialConstPlacement(std::vector<MachineInstr*> &CPEMIs) {
  // MachineConstantPool measures alignment in bytes.
  const Align MaxAlign = MCP->getConstantPoolAlign();
  const unsigned MaxLogAlign = Log2(MaxAlign);
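  // The linker may move functions around based on their alignment, so keep
  // the function at least as aligned as its most-aligned constant pool entry;
  // halfword-aligned pools are still given word alignment below.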
  Align FuncAlign = MaxAlign;
  if (MaxAlign == 2)
    FuncAlign = Align(4);
  MF->ensureAlignment(FuncAlign);
  const std::vector<MachineConstantPoolEntry> &CPs = MCP->getConstants();

  const DataLayout &TD = MF->getDataLayout();
  for (unsigned i = 0, e = CPs.size(); i != e; ++i) {
    unsigned Size = CPs[i].getSizeInBytes(TD);
    Align Alignment = CPs[i].getAlign();

    // Insert CONSTPOOL_ENTRY before entries with a smaller alignment.
    unsigned LogAlign = Log2(Alignment);
    MachineBasicBlock::iterator InsAt = InsPoint[LogAlign];
    MachineInstr *CPEMI =
        BuildMI(*BB, InsAt, DebugLoc(), TII->get(ARM::CONSTPOOL_ENTRY))
            .addImm(i).addConstantPoolIndex(i).addImm(Size);
    CPEMIs.push_back(CPEMI);

    // Ensure that future entries with higher alignment get inserted before
    // CPEMI.
    for (unsigned a = LogAlign + 1; a <= MaxLogAlign; ++a)
      if (InsPoint[a] == InsAt)
        InsPoint[a] = CPEMI;

    // Add a new CPEntry, but no corresponding CPUser yet.
    CPEntries.emplace_back(1, CPEntry(CPEMI, i));
    ++NumCPEs;
    LLVM_DEBUG(dbgs() << "Moved CPI#" << i << " to end of function, size = "
                      << Size << ", align = " << Alignment.value() << '\n');
  }
void ARMConstantIslands::doInitialJumpTablePlacement(
    std::vector<MachineInstr *> &CPEMIs) {
  unsigned i = CPEntries.size();
  auto MJTI = MF->getJumpTableInfo();
  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
    switch (MI->getOpcode()) {
    case ARM::BR_JTm_i12:
      assert(!MF->getInfo<ARMFunctionInfo>()->branchTargetEnforcement() &&
             "Branch protection must not be enabled for Arm or Thumb1 modes");
      JTOpcode = ARM::JUMPTABLE_ADDRS;
      break;
    case ARM::t2BR_JT:
      JTOpcode = ARM::JUMPTABLE_INSTS;
      break;
    case ARM::tTBB_JT:
    case ARM::t2TBB_JT:
      JTOpcode = ARM::JUMPTABLE_TBB;
      break;
    case ARM::tTBH_JT:
    case ARM::t2TBH_JT:
      JTOpcode = ARM::JUMPTABLE_TBH;
      break;
    }
    unsigned NumOps = MI->getDesc().getNumOperands();
    MachineOperand JTOp =
        MI->getOperand(NumOps - (MI->isPredicable() ? 2 : 1));

    CPEMIs.push_back(CPEMI);
    CPEntries.emplace_back(1, CPEntry(CPEMI, JTI));
    JumpTableEntryIndices.insert(std::make_pair(JTI, CPEntries.size() - 1));
    if (!LastCorrectlyNumberedBB)
      LastCorrectlyNumberedBB = &MBB;
  }

  // If we did anything then we need to renumber the subsequent blocks.
  if (LastCorrectlyNumberedBB) {
    MF->RenumberBlocks(LastCorrectlyNumberedBB);
  return TooDifficult || FBB == nullptr;
ARMConstantIslands::CPEntry *
ARMConstantIslands::findConstPoolEntry(unsigned CPI,
                                       const MachineInstr *CPEMI) {
  std::vector<CPEntry> &CPEs = CPEntries[CPI];
  // Number of entries per constpool index should be small, just do a
  // linear search.
  for (CPEntry &CPE : CPEs)
    if (CPE.CPEMI == CPEMI)
      return &CPE;
  return nullptr;
}
  switch (CPEMI->getOpcode()) {
  case ARM::CONSTPOOL_ENTRY:
    break;
  case ARM::JUMPTABLE_TBB:
    return isThumb1 ? Align(4) : Align(1);
  case ARM::JUMPTABLE_TBH:
    return isThumb1 ? Align(4) : Align(2);
  case ARM::JUMPTABLE_INSTS:
    return Align(2);
  case ARM::JUMPTABLE_ADDRS:
    return Align(4);
  default:
    llvm_unreachable("unknown constpool entry kind");
  }

  unsigned CPI = getCombinedIndex(CPEMI);
  assert(CPI < MCP->getConstants().size() && "Invalid constant pool index.");
  return MCP->getConstants()[CPI].getAlign();
}
void ARMConstantIslands::scanFunctionJumpTables() {
  for (MachineBasicBlock &MBB : *MF) {
    for (MachineInstr &I : MBB)
      if (I.isBranch() &&
          (I.getOpcode() == ARM::t2BR_JT || I.getOpcode() == ARM::tBR_JTr))
        T2JumpTables.push_back(&I);
  }

  // ...
        if (isAlwaysIndirectTarget(*MBB))
          BlockJumpTableRefCount[MBB] = std::numeric_limits<int>::max();
        else
          ++BlockJumpTableRefCount[MBB];
void ARMConstantIslands::
initializeFunctionInfo(const std::vector<MachineInstr*> &CPEMIs) {
  BBUtils->computeAllBlockSizes();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();

  // The known bits of the entry block offset are determined by the function
  // alignment.
  BBInfo.front().KnownBits = Log2(MF->getAlignment());

  // Compute block offsets and known bits.
  BBUtils->adjustBBOffsetsAfter(&MF->front());
  bool InlineJumpTables =
      MF->getJumpTableInfo() &&
      MF->getJumpTableInfo()->getEntryKind() == MachineJumpTableInfo::EK_Inline;

  for (MachineBasicBlock &MBB : *MF) {
    // If this block doesn't fall through into the next MBB, then this is
    // 'water' that a constant pool island could be placed.
    if (!BBHasFallthrough(&MBB))
      WaterList.push_back(&MBB);

    for (MachineInstr &I : MBB) {
      if (I.isDebugInstr())
        continue;

      unsigned Opc = I.getOpcode();
      if (I.isBranch()) {
        // ...
        if (InlineJumpTables)
          T2JumpTables.push_back(&I);

        // Record this immediate branch.
        unsigned MaxOffs = ((1 << (Bits-1))-1) * Scale;
        ImmBranches.push_back(ImmBranch(&I, MaxOffs, isCond, UOpc));
      }

      if (Opc == ARM::tPUSH || Opc == ARM::tPOP_RET)
        PushPopMIs.push_back(&I);

      if (Opc == ARM::CONSTPOOL_ENTRY || Opc == ARM::JUMPTABLE_ADDRS ||
          Opc == ARM::JUMPTABLE_INSTS || Opc == ARM::JUMPTABLE_TBB ||
          Opc == ARM::JUMPTABLE_TBH)
        continue;
      // Scan the instructions for constant pool operands.
      for (unsigned op = 0, e = I.getNumOperands(); op != e; ++op)
        if (I.getOperand(op).isCPI() ||
            (I.getOperand(op).isJTI() && InlineJumpTables)) {
          // We found one. The addressing mode tells us the max displacement
          // from the PC that this instruction permits.

          // Basic size info comes from the TSFlags field.
          unsigned Bits = 0;
          unsigned Scale = 1;
          bool NegOk = false;
          bool IsSoImm = false;

          switch (Opc) {
          case ARM::LEApcrelJT: {
            // This takes a SoImm, which is 8-bit immediate rotated. We'll
            // pretend the maximum offset displacement can be calculated out
            // of the range if the alignment permits.
            unsigned CPI = I.getOperand(op).getIndex();
            assert(CPI < CPEMIs.size());
            MachineInstr *CPEMI = CPEMIs[CPI];
            const Align CPEAlign = getCPEAlign(CPEMI);
            const unsigned LogCPEAlign = Log2(CPEAlign);
            if (LogCPEAlign >= 2)
          case ARM::t2LEApcrel:
          case ARM::t2LEApcrelJT:
          case ARM::tLEApcrelJT:
          case ARM::t2LDRSHpci:
          case ARM::t2LDRSBpci:
          // Remember that this is a user of a CP entry.
          unsigned CPI = I.getOperand(op).getIndex();
          if (I.getOperand(op).isJTI()) {
            JumpTableUserIndices.insert(std::make_pair(CPI, CPUsers.size()));
            CPI = JumpTableEntryIndices[CPI];
          }
          MachineInstr *CPEMI = CPEMIs[CPI];
          unsigned MaxOffs = ((1 << Bits)-1) * Scale;
          CPUsers.push_back(CPUser(&I, CPEMI, MaxOffs, NegOk, IsSoImm));

          // Increment corresponding CPEntry reference count.
          CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);
          assert(CPE && "Cannot find a corresponding CPEntry!");
          CPE->RefCount++;
/// CompareMBBNumbers - Little predicate function to sort the WaterList by MBB
/// ID.
static bool CompareMBBNumbers(const MachineBasicBlock *LHS,
                              const MachineBasicBlock *RHS) {
  return LHS->getNumber() < RHS->getNumber();
}

  WaterList.insert(IP, NewBB);
  LivePhysRegs LRs(*MF->getSubtarget().getRegisterInfo());
  LRs.addLiveOuts(*OrigBB);
  for (MachineInstr &LiveMI : make_range(OrigBB->rbegin(), OrigBB->rend()))
    LRs.stepBackward(LiveMI);

  // Insert the new block into the function.
  MF->insert(MBBI, NewBB);

  unsigned Opc = isThumb ? (isThumb2 ? ARM::t2B : ARM::tB) : ARM::B;

  // Update the live-in registers of the new block.
  for (MCPhysReg L : LRs)
    if (!MRI.isReserved(L))
      NewBB->addLiveIn(L);
  MF->RenumberBlocks(NewBB);

  if (WaterBB == OrigBB)
    WaterList.insert(std::next(IP), NewBB);
  else
    WaterList.insert(IP, OrigBB);
  NewWaterList.insert(OrigBB);

  // Figure out how large the OrigBB is. As the first half of the original
  // block, it cannot contain a big block.
  BBUtils->computeBlockSize(OrigBB);

  // Figure out how large the NewBB is. As the second half of the original
  // block, it may contain a big block.
  BBUtils->computeBlockSize(NewBB);

  // All BBOffsets following these blocks must be modified.
  BBUtils->adjustBBOffsetsAfter(OrigBB);
unsigned ARMConstantIslands::getUserOffset(CPUser &U) const {
  unsigned UserOffset = BBUtils->getOffsetOf(U.MI);

  // The value read from PC is offset from the actual instruction address: by
  // 4 bytes in Thumb mode, 8 bytes in ARM mode.
  UserOffset += (isThumb ? 4 : 8);
/// isOffsetInRange - Checks whether UserOffset (the location of a constant
/// pool reference) can reach a constant placed at TrialOffset.
bool ARMConstantIslands::isOffsetInRange(unsigned UserOffset,
                                         unsigned TrialOffset,
                                         unsigned MaxDisp,
                                         bool NegativeOK, bool IsSoImm) {
  if (UserOffset <= TrialOffset) {
    // User before the Trial.
    if (TrialOffset - UserOffset <= MaxDisp)
      return true;
  } else if (NegativeOK) {
    if (UserOffset - TrialOffset <= MaxDisp)
      return true;
  }
  return false;
}
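// Note the asymmetry: a trial offset after the user is always reachable up to
// MaxDisp, while one before the user is reachable only when the instruction's
// encoding accepts negative offsets (NegativeOK).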
bool ARMConstantIslands::isWaterInRange(unsigned UserOffset,
                                        MachineBasicBlock *Water, CPUser &U,
                                        unsigned &Growth) {
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  const Align CPEAlign = getCPEAlign(U.CPEMI);
  const unsigned CPEOffset = BBInfo[Water->getNumber()].postOffset(CPEAlign);
  unsigned NextBlockOffset;
  Align NextBlockAlignment;
  MachineFunction::const_iterator NextBlock = Water->getIterator();
  if (++NextBlock == MF->end()) {
    // The CPE goes at the end of the function.
    NextBlockOffset = BBInfo[Water->getNumber()].postOffset();
  } else {
    NextBlockOffset = BBInfo[NextBlock->getNumber()].Offset;
    NextBlockAlignment = NextBlock->getAlignment();
  }
  unsigned Size = U.CPEMI->getOperand(2).getImm();
  unsigned CPEEnd = CPEOffset + Size;

  // The CPE may be able to hide in the alignment padding before the next
  // block. It may also cause more padding to be inserted.
  if (CPEEnd > NextBlockOffset) {
    Growth = CPEEnd - NextBlockOffset;
    // Compute the padding that would go at the end of the CPE to align the
    // next block.
    Growth += offsetToAlignment(CPEEnd, NextBlockAlignment);

    // If the CPE is to be inserted before the instruction, that will raise
    // the offset of the instruction.
    if (CPEOffset < UserOffset)
      UserOffset += Growth + UnknownPadding(MF->getAlignment(), Log2(CPEAlign));
  } else {
    // CPE fits in existing padding.
    Growth = 0;
  }

  return isOffsetInRange(UserOffset, CPEOffset, U);
}
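// "Water" is the gap after a block that never falls through; an island placed
// there needs no extra branch. Growth reports how many bytes of padding and
// entry size the placement would add, letting callers pick the cheapest spot.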
bool ARMConstantIslands::isCPEntryInRange(MachineInstr *MI,
                                          unsigned UserOffset,
                                          MachineInstr *CPEMI,
                                          unsigned MaxDisp,
                                          bool NegOk, bool DoDump) {
  unsigned CPEOffset = BBUtils->getOffsetOf(CPEMI);

  if (DoDump) {
    LLVM_DEBUG({
      unsigned Block = MI->getParent()->getNumber();
      dbgs() << "User of CPE#" << CPEMI->getOperand(0).getImm()
             << " max delta=" << MaxDisp
             << format(" insn address=%#x", UserOffset) << " in "
             << printMBBReference(*MI->getParent())
             << format("CPE address=%#x offset=%+d: ", CPEOffset,
                       int(CPEOffset - UserOffset));
    });
  }

  return isOffsetInRange(UserOffset, CPEOffset, MaxDisp, NegOk);
}
/// decrementCPEReferenceCount - find the constant pool entry with index CPI
/// and instruction CPEMI, and decrement its refcount. If the refcount becomes
/// 0 remove the entry and instruction. Returns true if we removed the entry.
bool ARMConstantIslands::decrementCPEReferenceCount(unsigned CPI,
                                                    MachineInstr *CPEMI) {
  // Find the old entry. Eliminate it if it is no longer used.
  CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);
  assert(CPE && "Unexpected!");
  if (--CPE->RefCount == 0) {
    removeDeadCPEMI(CPEMI);
    CPE->CPEMI = nullptr;
    return true;
  }
  return false;
}
unsigned ARMConstantIslands::getCombinedIndex(const MachineInstr *CPEMI) {
  if (CPEMI->getOperand(1).isCPI())
    return CPEMI->getOperand(1).getIndex();

  return JumpTableEntryIndices[CPEMI->getOperand(1).getIndex()];
}
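// findInRangeCPEntry returns a small result code, used by
// handleConstantPoolUser below: 0 = no usable entry, 1 = an entry is in range
// with no code change, and 2 = an entry was found and the now-dead original
// was deleted, so offsets changed and callers must rescan.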
int ARMConstantIslands::findInRangeCPEntry(CPUser &U, unsigned UserOffset) {
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI = U.CPEMI;

  // Check to see if the CPE is already in-range.
  if (isCPEntryInRange(UserMI, UserOffset, CPEMI, U.getMaxDisp(), U.NegOk,
                       true)) {
    LLVM_DEBUG(dbgs() << "In range\n");
    return 1;
  }

  // No. Look for previously created clones of the CPE that are in range.
  unsigned CPI = getCombinedIndex(CPEMI);
  std::vector<CPEntry> &CPEs = CPEntries[CPI];
  for (CPEntry &CPE : CPEs) {
    // Already tried this one.
    if (CPE.CPEMI == CPEMI)
      continue;
    // Removing CPEs can leave empty entries, skip.
    if (CPE.CPEMI == nullptr)
      continue;
    if (isCPEntryInRange(UserMI, UserOffset, CPE.CPEMI, U.getMaxDisp(),
                         U.NegOk)) {
      LLVM_DEBUG(dbgs() << "Replacing CPE#" << CPI << " with CPE#" << CPE.CPI
                        << "\n");
      // Point the CPUser node to the replacement and update the operand.
      U.CPEMI = CPE.CPEMI;
      for (MachineOperand &MO : UserMI->operands())
        if (MO.isCPI()) {
          MO.setIndex(CPE.CPI);
          break;
        }
      CPE.RefCount++;
      // If the old entry was removed, offsets changed and another pass is
      // needed; otherwise the replacement is final.
      return decrementCPEReferenceCount(CPI, CPEMI) ? 2 : 1;
    }
  }
  return 0;
}
static unsigned getUnconditionalBrDisp(int Opc) {
  switch (Opc) {
  case ARM::tB:  return ((1<<10)-1)*2;
  case ARM::t2B: return ((1<<23)-1)*2;
  default:       break;
  }
  return ((1<<23)-1)*4;
}
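// These displacements mirror the branch encodings: tB has a signed 11-bit
// halfword offset (((1 << 10) - 1) * 2 = 2046 bytes each way), while t2B and
// ARM B carry signed 24-bit offsets in halfwords and words respectively.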
bool ARMConstantIslands::findAvailableWater(CPUser &U, unsigned UserOffset,
                                            water_iterator &WaterIter,
                                            bool CloserWater) {
  if (WaterList.empty())
    return false;

  unsigned BestGrowth = ~0u;

  MachineBasicBlock *UserBB = U.MI->getParent();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  const Align CPEAlign = getCPEAlign(U.CPEMI);
  unsigned MinNoSplitDisp =
      BBInfo[UserBB->getNumber()].postOffset(CPEAlign) - UserOffset;
  if (CloserWater && MinNoSplitDisp > U.getMaxDisp() / 2)
    return false;
  for (water_iterator IP = std::prev(WaterList.end()), B = WaterList.begin();;
       --IP) {
    MachineBasicBlock *WaterBB = *IP;
    // Check if water is in range and is either at a lower address than the
    // current "high water mark" or a new water block just created.
    unsigned Growth;
    if (isWaterInRange(UserOffset, WaterBB, U, Growth) &&
        (WaterBB->getNumber() < U.HighWaterMark->getNumber() ||
         NewWaterList.count(WaterBB) || WaterBB == U.MI->getParent()) &&
        Growth < BestGrowth) {
      // This is the least amount of required padding seen so far.
      BestGrowth = Growth;
      WaterIter = IP;
      LLVM_DEBUG(dbgs() << "Found water after " << printMBBReference(*WaterBB)
                        << " Growth=" << Growth << '\n');

      if (CloserWater && WaterBB == U.MI->getParent())
        return true;
      // Keep looking unless it is perfect.
      if (!CloserWater && BestGrowth == 0)
        return true;
    }
    if (IP == B)
      break;
  }
  return BestGrowth != ~0u;
}
/// createNewWater - No existing WaterList entry will work for
/// CPUsers[CPUserIndex], so create a place to put the CPE.
void ARMConstantIslands::createNewWater(unsigned CPUserIndex,
                                        unsigned UserOffset,
                                        MachineBasicBlock *&NewMBB) {
  CPUser &U = CPUsers[CPUserIndex];
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI = U.CPEMI;
  const Align CPEAlign = getCPEAlign(CPEMI);
  MachineBasicBlock *UserMBB = UserMI->getParent();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  const BasicBlockInfo &UserBBI = BBInfo[UserMBB->getNumber()];

  // If the block does not end in an unconditional branch already, and if the
  // end of the block is within range, make new water there.
  if (BBHasFallthrough(UserMBB)) {
    // Size of branch to insert.
    unsigned Delta = isThumb1 ? 2 : 4;
    // Compute the offset where the CPE will begin.
    unsigned CPEOffset = UserBBI.postOffset(CPEAlign) + Delta;

    if (isOffsetInRange(UserOffset, CPEOffset, U)) {
      LLVM_DEBUG(dbgs() << "Split at end of " << printMBBReference(*UserMBB)
                        << format(", expected CPE offset %#x\n", CPEOffset));
      NewMBB = &*++UserMBB->getIterator();

      int UncondBr = isThumb ? ((isThumb2) ? ARM::t2B : ARM::tB) : ARM::B;

      unsigned MaxDisp = getUnconditionalBrDisp(UncondBr);
      ImmBranches.push_back(ImmBranch(&UserMBB->back(),
                                      MaxDisp, false, UncondBr));
      BBUtils->computeBlockSize(UserMBB);
      BBUtils->adjustBBOffsetsAfter(UserMBB);
      return;
    }
  }
  // What a big block. Find a place within the block to split it. Try to make
  // the split point fully aligned.
  const Align Align = MF->getAlignment();
  assert(Align >= CPEAlign && "Over-aligned constant pool entry");
  unsigned KnownBits = UserBBI.internalKnownBits();
  unsigned UPad = UnknownPadding(Align, KnownBits);
  unsigned BaseInsertOffset = UserOffset + U.getMaxDisp() - UPad;

  // The 4 in the following is for the unconditional branch we'll be inserting
  // (allows for long branch on Thumb1). Alignment of the island is handled
  // inside isOffsetInRange.
  BaseInsertOffset -= 4;

  LLVM_DEBUG(dbgs() << format("Split in middle of big block before %#x",
                              BaseInsertOffset)
                    << " up=" << UPad << '\n');

  // This could point off the end of the block if we've already got constant
  // pool entries following this block; only the last one is in the water
  // list. Back past any possible branches.
  if (BaseInsertOffset + 8 >= UserBBI.postOffset()) {
    BaseInsertOffset =
        std::max(UserBBI.postOffset() - UPad - 8,
                 UserOffset + TII->getInstSizeInBytes(*UserMI) + 1);
  }

  // Avoid splitting in the middle of an IT block.
  Register PredReg;
  MachineBasicBlock::iterator I = UserMI;
  for (unsigned Offset = UserOffset + TII->getInstSizeInBytes(*UserMI);
       I->getOpcode() != ARM::t2IT &&
           getITInstrPredicate(*I, PredReg) != ARMCC::AL;
       Offset += TII->getInstSizeInBytes(*I), I = std::next(I)) {
    BaseInsertOffset =
        std::max(BaseInsertOffset, Offset + TII->getInstSizeInBytes(*I) + 1);
    assert(I != UserMBB->end() && "Fell off end of block");
  }
  unsigned EndInsertOffset = BaseInsertOffset + 4 + UPad +
                             CPEMI->getOperand(2).getImm();
  MachineBasicBlock::iterator MI = UserMI;
  ++MI;
  unsigned CPUIndex = CPUserIndex + 1;
  unsigned NumCPUsers = CPUsers.size();
  MachineInstr *LastIT = nullptr;
  for (unsigned Offset = UserOffset + TII->getInstSizeInBytes(*UserMI);
       Offset < BaseInsertOffset;
       Offset += TII->getInstSizeInBytes(*MI), MI = std::next(MI)) {
    assert(MI != UserMBB->end() && "Fell off end of block");
    if (CPUIndex < NumCPUsers && CPUsers[CPUIndex].MI == &*MI) {
      CPUser &U = CPUsers[CPUIndex];
      if (!isOffsetInRange(Offset, EndInsertOffset, U)) {
        BaseInsertOffset -= Align.value();
        EndInsertOffset -= Align.value();
      }
      // This is overly conservative, as we don't account for CPEMIs being
      // reused within the block, but it doesn't matter much. Also assume
      // CPEs are added in order with alignment padding.
      EndInsertOffset += U.CPEMI->getOperand(2).getImm();
      CPUIndex++;
    }

    // Remember the last IT instruction.
    if (MI->getOpcode() == ARM::t2IT)
      LastIT = &*MI;
  }
  // Avoid splitting a MOVW+MOVT pair with a relocation on Windows.
  if (STI->isTargetWindows() && isThumb &&
      MI->getOpcode() == ARM::t2MOVTi16 &&
      (MI->getOperand(2).getTargetFlags() & ARMII::MO_OPTION_MASK) ==
          ARMII::MO_HI16) {
    --MI;
    assert(MI->getOpcode() == ARM::t2MOVi16 &&
           (MI->getOperand(1).getTargetFlags() & ARMII::MO_OPTION_MASK) ==
               ARMII::MO_LO16);
  }

  NewMBB = splitBlockBeforeInstr(&*MI);
bool ARMConstantIslands::handleConstantPoolUser(unsigned CPUserIndex,
                                                bool CloserWater) {
  CPUser &U = CPUsers[CPUserIndex];
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI = U.CPEMI;
  unsigned CPI = getCombinedIndex(CPEMI);
  unsigned Size = CPEMI->getOperand(2).getImm();
  // Compute this only once, it's expensive.
  unsigned UserOffset = getUserOffset(U);

  // See if the current entry is within range, or there is a clone of it
  // in range.
  int result = findInRangeCPEntry(U, UserOffset);
  if (result == 1)
    return false;
  else if (result == 2)
    return true;

  // No existing clone of this CPE is within range.
  // We will be generating a new clone. Get a UID for it.
  unsigned ID = AFI->createPICLabelUId();

  // Look for water where we can place this CPE.
  MachineBasicBlock *NewIsland = MF->CreateMachineBasicBlock();
  MachineBasicBlock *NewMBB;
  water_iterator IP;
  if (findAvailableWater(U, UserOffset, IP, CloserWater)) {
    LLVM_DEBUG(dbgs() << "Found water in range\n");
    MachineBasicBlock *WaterBB = *IP;

    // If the original WaterList entry was "new water" on this iteration,
    // propagate the marker to the new island.
    if (NewWaterList.erase(WaterBB))
      NewWaterList.insert(NewIsland);

    // The new CPE goes before the following block (NewMBB).
    NewMBB = &*++WaterBB->getIterator();
  } else {
    // No water found.
    LLVM_DEBUG(dbgs() << "No water found\n");
    createNewWater(CPUserIndex, UserOffset, NewMBB);

    // The block just before NewMBB may have been added to the WaterList by
    // the split; find it and transfer the marker.
    MachineBasicBlock *WaterBB = &*--NewMBB->getIterator();
    IP = find(WaterList, WaterBB);
    if (IP != WaterList.end())
      NewWaterList.erase(WaterBB);

    // We are adding new water. Update NewWaterList.
    NewWaterList.insert(NewIsland);
  }

  // Remove the original WaterList entry; we want subsequent insertions in
  // this vicinity to go after the one we're about to insert.
  if (IP != WaterList.end())
    WaterList.erase(IP);

  // Okay, we know we can put an island before NewMBB now, do it!
  MF->insert(NewMBB->getIterator(), NewIsland);

  // Update internal data structures to account for the newly inserted MBB.
  updateForInsertedWaterBlock(NewIsland);

  // Mark the new island as the high water mark for this user.
  U.HighWaterMark = NewIsland;

  CPEntries[CPI].push_back(CPEntry(U.CPEMI, ID, 1));

  // Decrement the old entry, and remove it if refcount becomes 0.
  decrementCPEReferenceCount(CPI, CPEMI);

  // Increase the size of the island block to account for the new entry.
  BBUtils->adjustBBSize(NewIsland, Size);
  BBUtils->adjustBBOffsetsAfter(&*--NewIsland->getIterator());

  LLVM_DEBUG(
      dbgs() << "  Moved CPE to #" << ID << " CPI=" << CPI
             << format(" offset=%#x\n",
                       BBUtils->getBBInfo()[NewIsland->getNumber()].Offset));

  return true;
}
void ARMConstantIslands::removeDeadCPEMI(MachineInstr *CPEMI) {
  MachineBasicBlock *CPEBB = CPEMI->getParent();
  unsigned Size = CPEMI->getOperand(2).getImm();
  CPEMI->eraseFromParent();
  BBUtils->adjustBBSize(CPEBB, -Size);

  if (CPEBB->empty()) {
    // This block no longer needs to be aligned.
    CPEBB->setAlignment(Align(1));
  }

  BBUtils->adjustBBOffsetsAfter(CPEBB);
bool ARMConstantIslands::removeUnusedCPEntries() {
  bool MadeChange = false;
  for (std::vector<CPEntry> &CPEs : CPEntries) {
    for (CPEntry &CPE : CPEs) {
      if (CPE.RefCount == 0 && CPE.CPEMI) {
        removeDeadCPEMI(CPE.CPEMI);
        CPE.CPEMI = nullptr;
        MadeChange = true;
      }
    }
  }
  return MadeChange;
}
bool ARMConstantIslands::fixupImmediateBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *DestBB = MI->getOperand(0).getMBB();

  // Check to see if the DestBB is already in-range.
  if (BBUtils->isBBInRange(MI, DestBB, Br.MaxDisp))
    return false;

  if (!Br.isCond)
    return fixupUnconditionalBr(Br);
  return fixupConditionalBr(Br);
}
bool ARMConstantIslands::fixupUnconditionalBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *MBB = MI->getParent();

  if (!AFI->isLRSpilled())
    report_fatal_error("underestimated function size");

  // Use BL to implement far jump.
  Br.MaxDisp = (1 << 21) * 2;
  MI->setDesc(TII->get(ARM::tBfar));
  BBUtils->adjustBBOffsetsAfter(MBB);
  ++NumUBrFixed;
  return true;
}
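// (1 << 21) * 2 is the roughly ±4 MiB reach of the BL encoding that tBfar
// expands to, versus the ±2 KiB of the original Thumb-1 B it replaces.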
bool ARMConstantIslands::fixupConditionalBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *MBB = MI->getParent();
  MachineBasicBlock *DestBB = MI->getOperand(0).getMBB();

  // Add an unconditional branch to the destination and invert the branch
  // condition to jump over it:
  //   blt L1
  // becomes:
  //   bge L2
  //   b   L1
  // L2:

  if (BBUtils->isBBInRange(MI, NewDest, Br.MaxDisp)) {
    LLVM_DEBUG(
        dbgs() << "  Invert Bcc condition and swap its destination with "
               << *BMI);
    BMI->getOperand(0).setMBB(DestBB);
    MI->getOperand(0).setMBB(NewDest);
    MI->getOperand(1).setImm(CC);
    return true;
  }

  if (NeedSplit) {
    splitBlockBeforeInstr(MI);
    // No need for the branch to the next block. We're adding an unconditional
    // branch to the destination.
    int delta = TII->getInstSizeInBytes(MBB->back());
    BBUtils->adjustBBSize(MBB, -delta);
    MBB->back().eraseFromParent();
  }

  LLVM_DEBUG(dbgs() << "  Insert B to " << printMBBReference(*DestBB)
                    << " also invert condition and change dest. to "
                    << printMBBReference(*NextBB) << "\n");

  // Insert a new conditional branch and a new unconditional branch.
  // ...
  BBUtils->adjustBBSize(MBB, TII->getInstSizeInBytes(MBB->back()));
  // ...
  BBUtils->adjustBBSize(MBB, TII->getInstSizeInBytes(MBB->back()));
  unsigned MaxDisp = getUnconditionalBrDisp(Br.UncondBr);
  ImmBranches.push_back(ImmBranch(&MBB->back(), MaxDisp,
                                  false, Br.UncondBr));

  // Remove the old conditional branch. It may or may not still be in MBB.
  BBUtils->adjustBBSize(MI->getParent(), -TII->getInstSizeInBytes(*MI));
  MI->eraseFromParent();
  BBUtils->adjustBBOffsetsAfter(MBB);
  return true;
}
bool ARMConstantIslands::optimizeThumb2Instructions() {
  bool MadeChange = false;

  // Shrink ADR and LDR from constantpool.
  for (CPUser &U : CPUsers) {
    unsigned Opcode = U.MI->getOpcode();
    unsigned NewOpc = 0;
    unsigned Scale = 1;
    unsigned Bits = 0;

    switch (Opcode) {
    default: break;
    case ARM::t2LEApcrel:
      if (isARMLowRegister(U.MI->getOperand(0).getReg())) {
        NewOpc = ARM::tLEApcrel;
        Bits = 8;
        Scale = 4;
      }
      break;
    case ARM::t2LDRpci:
      if (isARMLowRegister(U.MI->getOperand(0).getReg())) {
        NewOpc = ARM::tLDRpci;
        Bits = 8;
        Scale = 4;
      }
      break;
    }

    if (!NewOpc)
      continue;

    unsigned UserOffset = getUserOffset(U);
    unsigned MaxOffs = ((1 << Bits) - 1) * Scale;

    // Be conservative with inline asm.
    if (!U.KnownAlignment)
      MaxOffs -= 2;

    if (isCPEntryInRange(U.MI, UserOffset, U.CPEMI, MaxOffs, false, true)) {
      LLVM_DEBUG(dbgs() << "Shrink: " << *U.MI);
      U.MI->setDesc(TII->get(NewOpc));
      MachineBasicBlock *MBB = U.MI->getParent();
      BBUtils->adjustBBSize(MBB, -2);
      BBUtils->adjustBBOffsetsAfter(MBB);
      ++NumT2CPShrunk;
      MadeChange = true;
    }
  }

  return MadeChange;
}
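// tLEApcrel and tLDRpci are the 16-bit encodings: an 8-bit immediate scaled
// by 4 and a low-register (r0-r7) destination, hence the isARMLowRegister
// guard and the smaller MaxOffs computed above.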
bool ARMConstantIslands::optimizeThumb2Branches() {
  auto TryShrinkBranch = [this](ImmBranch &Br) {
    unsigned Opcode = Br.MI->getOpcode();
    unsigned NewOpc = 0;
    unsigned Scale = 1;
    unsigned Bits = 0;
    switch (Opcode) {
    default: break;
    case ARM::t2B:
      NewOpc = ARM::tB;
      Bits = 11;
      Scale = 2;
      break;
    case ARM::t2Bcc:
      NewOpc = ARM::tBcc;
      Bits = 8;
      Scale = 2;
      break;
    }

    if (NewOpc) {
      unsigned MaxOffs = ((1 << (Bits - 1)) - 1) * Scale;
      MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
      if (BBUtils->isBBInRange(Br.MI, DestBB, MaxOffs)) {
        LLVM_DEBUG(dbgs() << "Shrink branch: " << *Br.MI);
        Br.MI->setDesc(TII->get(NewOpc));
        MachineBasicBlock *MBB = Br.MI->getParent();
        BBUtils->adjustBBSize(MBB, -2);
        BBUtils->adjustBBOffsetsAfter(MBB);
        ++NumT2BrShrunk;
        return true;
      }
    }
    return false;
  };

  struct ImmCompare {
    MachineInstr *MI = nullptr;
    unsigned NewOpc = 0;
  };

  auto FindCmpForCBZ = [this](ImmBranch &Br, ImmCompare &ImmCmp,
                              MachineBasicBlock *DestBB) {
    ImmCmp.MI = nullptr;

    // CBZ/CBNZ may only be used if CPSR dies at the branch.
    if (!Br.MI->killsRegister(ARM::CPSR, nullptr))
      return false;

    unsigned NewOpc = 0;
    ARMCC::CondCodes Pred = (ARMCC::CondCodes)Br.MI->getOperand(1).getImm();
    if (Pred == ARMCC::EQ)
      NewOpc = ARM::tCBZ;
    else if (Pred == ARMCC::NE)
      NewOpc = ARM::tCBNZ;
    else
      return false;

    // Check if the distance is within 126. Subtract starting offset by 2
    // because the cmp will be eliminated.
    unsigned BrOffset = BBUtils->getOffsetOf(Br.MI) + 4 - 2;
    BBInfoVector &BBInfo = BBUtils->getBBInfo();
    unsigned DestOffset = BBInfo[DestBB->getNumber()].Offset;
    if (BrOffset >= DestOffset || (DestOffset - BrOffset) > 126)
      return false;

    // Search backwards for a tCMPi8 against 0 that can be folded.
    auto *TRI = STI->getRegisterInfo();
    MachineInstr *CmpMI = findCMPToFoldIntoCBZ(Br.MI, TRI);
    if (!CmpMI || CmpMI->getOpcode() != ARM::tCMPi8)
      return false;

    ImmCmp.MI = CmpMI;
    ImmCmp.NewOpc = NewOpc;
    return true;
  };

  auto TryConvertToLE = [this](ImmBranch &Br, ImmCompare &Cmp) {
    if (Br.MI->getOpcode() != ARM::t2Bcc || !STI->hasLOB() ||
        STI->hasMinSize())
      return false;

    MachineBasicBlock *MBB = Br.MI->getParent();
    MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
    if (BBUtils->getOffsetOf(MBB) < BBUtils->getOffsetOf(DestBB) ||
        !BBUtils->isBBInRange(Br.MI, DestBB, 4094))
      return false;

    // The LE will branch backwards, so the guarding compare must now fall
    // through on the opposite condition.
    Cmp.NewOpc = Cmp.NewOpc == ARM::tCBZ ? ARM::tCBNZ : ARM::tCBZ;

    MachineInstrBuilder MIB = BuildMI(*MBB, Br.MI, Br.MI->getDebugLoc(),
                                      TII->get(ARM::t2LE));
    // Swapped a t2Bcc for a t2LE, so no need to update the size of the block.
    MIB.add(Br.MI->getOperand(0));
    Br.MI->eraseFromParent();
    Br.MI = MIB;
    ++NumLEInserted;
    return true;
  };

  bool MadeChange = false;

  // Visit branches in reverse order: later fixes shorten the distances seen
  // by earlier forward branches.
  for (ImmBranch &Br : reverse(ImmBranches)) {
    MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
    MachineBasicBlock *MBB = Br.MI->getParent();

    ImmCompare Cmp;
    if (FindCmpForCBZ(Br, Cmp, ExitBB) && TryConvertToLE(Br, Cmp)) {
      DestBB = ExitBB;
      MadeChange = true;
    } else {
      FindCmpForCBZ(Br, Cmp, DestBB);
      MadeChange |= TryShrinkBranch(Br);
    }

    unsigned Opcode = Br.MI->getOpcode();
    if ((Opcode != ARM::tBcc && Opcode != ARM::t2LE) || !Cmp.NewOpc)
      continue;

    Register Reg = Cmp.MI->getOperand(0).getReg();

    // Check for kill flags on Reg; if present, remove them and set kill on
    // Reg at the CBZ.
    auto *TRI = STI->getRegisterInfo();
    MachineBasicBlock::iterator KillMI = Br.MI;
    bool RegKilled = false;
    do {
      --KillMI;
      if (KillMI->killsRegister(Reg, TRI)) {
        KillMI->clearRegisterKills(Reg, TRI);
        RegKilled = true;
        break;
      }
    } while (KillMI != Cmp.MI);

    MachineInstr *NewBR =
        BuildMI(*MBB, Br.MI, Br.MI->getDebugLoc(), TII->get(Cmp.NewOpc))
            .addReg(Reg, getKillRegState(RegKilled))
            .addMBB(DestBB, Br.MI->getOperand(0).getTargetFlags());

    Cmp.MI->eraseFromParent();

    if (Br.MI->getOpcode() == ARM::tBcc) {
      Br.MI->eraseFromParent();
      Br.MI = NewBR;
      BBUtils->adjustBBSize(MBB, -2);
    }

    BBUtils->adjustBBOffsetsAfter(MBB);
    MadeChange = true;
  }

  return MadeChange;
}
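// t2LE is the Armv8.1-M Low Overhead Branch "loop end": once the hardware has
// cached the loop, the backwards branch costs essentially nothing, which is
// why trading a conditional branch for an LE plus an inverted CBZ/CBNZ above
// is normally a win despite the extra instruction.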
static bool isSimpleIndexCalc(MachineInstr &I, unsigned EntryReg,
                              unsigned BaseReg) {
  if (I.getOpcode() != ARM::t2ADDrs)
    return false;
  if (I.getOperand(0).getReg() != EntryReg)
    return false;
  if (I.getOperand(1).getReg() != BaseReg)
    return false;
  return true;
}
bool ARMConstantIslands::preserveBaseRegister(MachineInstr *JumpMI,
                                              MachineInstr *LEAMI,
                                              unsigned &DeadSize,
                                              bool &CanDeleteLEA,
                                              bool &BaseRegKill) {
  // ...
  CanDeleteLEA = true;
  BaseRegKill = false;

  MachineInstr *RemovableAdd = nullptr;
  MachineBasicBlock::iterator I(LEAMI);
  for (++I; &*I != JumpMI; ++I) {
    if (isSimpleIndexCalc(*I, EntryReg, BaseReg)) {
      RemovableAdd = &*I;
      break;
    }

    for (const MachineOperand &MO : I->operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;
      if (MO.isDef() && MO.getReg() == BaseReg)
        return false;
      if (MO.isUse() && MO.getReg() == BaseReg) {
        BaseRegKill = BaseRegKill || MO.isKill();
        CanDeleteLEA = false;
      }
    }
  }

  if (!RemovableAdd)
    return true;

  // Check the add really is removable, and that nothing else in the block
  // clobbers BaseReg.
  for (++I; &*I != JumpMI; ++I) {
    for (const MachineOperand &MO : I->operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;
      if (MO.isDef() && MO.getReg() == BaseReg)
        return false;
      if (MO.isUse() && MO.getReg() == EntryReg)
        RemovableAdd = nullptr;
    }
  }

  if (RemovableAdd) {
    RemovableAdd->eraseFromParent();
    DeadSize += isThumb2 ? 4 : 2;
  } else if (BaseReg == EntryReg) {
    // The add wasn't removable, but clobbered the base for the TBB, so we
    // can't preserve it.
    return false;
  }
static void RemoveDeadAddBetweenLEAAndJT(MachineInstr *LEAMI,
                                         MachineInstr *JumpMI,
                                         unsigned &DeadSize) {
  MachineInstr *RemovableAdd = nullptr;
  MachineBasicBlock::iterator I(LEAMI);
  for (++I; &*I != JumpMI; ++I) {
    if (I->getOpcode() == ARM::t2ADDrs &&
        I->getOperand(0).getReg() == EntryReg)
      RemovableAdd = &*I;
  }

  MachineBasicBlock::iterator J(RemovableAdd);
  for (++J; &*J != JumpMI; ++J) {
    for (const MachineOperand &MO : J->operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;
      if (MO.isDef() && MO.getReg() == EntryReg)
        return;
      if (MO.isUse() && MO.getReg() == EntryReg)
        return;
    }
  }
bool ARMConstantIslands::optimizeThumb2JumpTables() {
  bool MadeChange = false;

  const MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
  if (!MJTI)
    return false;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  for (MachineInstr *MI : T2JumpTables) {
    unsigned NumOps = MI->getDesc().getNumOperands();
    unsigned JTOpIdx = NumOps - (MI->isPredicable() ? 2 : 1);
    MachineOperand JTOP = MI->getOperand(JTOpIdx);
    unsigned JTI = JTOP.getIndex();

    bool ByteOk = true;
    bool HalfWordOk = true;
    unsigned JTOffset = BBUtils->getOffsetOf(MI) + 4;
    const std::vector<MachineBasicBlock*> &JTBBs = JT[JTI].MBBs;
    for (MachineBasicBlock *MBB : JTBBs) {
      unsigned DstOffset = BBInfo[MBB->getNumber()].Offset;
      if (ByteOk && (DstOffset - JTOffset) > ((1<<8)-1)*2)
        ByteOk = false;
      unsigned TBHLimit = ((1<<16)-1)*2;
      if (HalfWordOk && (DstOffset - JTOffset) > TBHLimit)
        HalfWordOk = false;
      if (!ByteOk && !HalfWordOk)
        break;
    }

    if (!ByteOk && !HalfWordOk)
      continue;

    CPUser &User = CPUsers[JumpTableUserIndices[JTI]];
    MachineBasicBlock *MBB = MI->getParent();
    if (!MI->getOperand(0).isKill())
      continue;

    unsigned DeadSize = 0;
    bool CanDeleteLEA = false;
    bool BaseRegKill = false;
    unsigned IdxReg = ~0U;
    bool IdxRegKill = true;
    if (isThumb2) {
      IdxReg = MI->getOperand(1).getReg();
      IdxRegKill = MI->getOperand(1).isKill();

      bool PreservedBaseReg =
          preserveBaseRegister(MI, User.MI, DeadSize, CanDeleteLEA,
                               BaseRegKill);
      if (!jumpTableFollowsTB(MI, User.CPEMI) && !PreservedBaseReg)
        continue;
    } else {
      // The Thumb-1 lowering shifts the index, loads the target from the
      // table, then branches; recognize that shift/load pattern here.
      if (Shift == UserMBB->begin())
        continue;

      Shift = prev_nodbg(Shift, UserMBB->begin());
      if (Shift->getOpcode() != ARM::tLSLri ||
          Shift->getOperand(3).getImm() != 2 ||
          !Shift->getOperand(2).isKill())
        continue;
      IdxReg = Shift->getOperand(2).getReg();
      Register ShiftedIdxReg = Shift->getOperand(0).getReg();

      MachineInstr *Load = Shift->getNextNode();
      if (Load->getOpcode() != ARM::tLDRr)
        continue;
      if (Load->getOperand(1).getReg() != BaseReg ||
          Load->getOperand(2).getReg() != ShiftedIdxReg ||
          !Load->getOperand(2).isKill())
        continue;

      // If we're in PIC mode, there should be another ADD following.
      auto *TRI = STI->getRegisterInfo();
      if (isPositionIndependentOrROPI) {
        MachineInstr *Add = Load->getNextNode();
        if (Add->getOpcode() != ARM::tADDrr ||
            Add->getOperand(2).getReg() != BaseReg ||
            Add->getOperand(3).getReg() != Load->getOperand(0).getReg() ||
            !Add->getOperand(3).isKill())
          continue;
        if (Add->getOperand(0).getReg() != MI->getOperand(0).getReg())
          continue;
        if (registerDefinedBetween(IdxReg, Add->getNextNode(), MI, TRI))
          // IdxReg gets redefined in the middle of the sequence.
          continue;
        Add->eraseFromParent();
        DeadSize += 2;
      } else {
        if (Load->getOperand(0).getReg() != MI->getOperand(0).getReg())
          continue;
        if (registerDefinedBetween(IdxReg, Load->getNextNode(), MI, TRI))
          // IdxReg gets redefined in the middle of the sequence.
          continue;
      }

      // Now safe to delete the load and lsl. The LEA will be removed later.
      CanDeleteLEA = true;
      Shift->eraseFromParent();
      Load->eraseFromParent();
      DeadSize += 4;
    }
    unsigned Opc = ByteOk ? ARM::t2TBB_JT : ARM::t2TBH_JT;
    if (!isThumb2)
      Opc = ByteOk ? ARM::tTBB_JT : ARM::tTBH_JT;

    unsigned JTOpc = ByteOk ? ARM::JUMPTABLE_TBB : ARM::JUMPTABLE_TBH;
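    // TBB loads a byte (TBH a halfword) table entry and branches to
    // PC + 2 * entry, so byte tables reach 510 bytes and halfword tables
    // 131070 bytes forward: exactly the ByteOk/HalfWordOk limits checked
    // above.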
      User.MI->eraseFromParent();
      DeadSize += isThumb2 ? 4 : 2;

      User.IsSoImm = false;
      User.KnownAlignment = false;
      // Bump the reference count of the jump table's CPEntry and register
      // the new table-branch as a CPUser of it.
      int CPEntryIdx = JumpTableEntryIndices[JTI];
      auto &CPEs = CPEntries[CPEntryIdx];
      auto Entry =
          find_if(CPEs, [&](CPEntry &E) { return E.CPEMI == User.CPEMI; });
      ++Entry->RefCount;
      CPUsers.emplace_back(CPUser(NewJTMI, User.CPEMI, 4, false, false));
      unsigned NewSize = TII->getInstSizeInBytes(*NewJTMI);
      unsigned OrigSize = TII->getInstSizeInBytes(*MI);
      MI->eraseFromParent();

      int Delta = OrigSize - NewSize + DeadSize;
      BBInfo[MBB->getNumber()].Size -= Delta;
      BBUtils->adjustBBOffsetsAfter(MBB);

      ++NumTBs;
      MadeChange = true;
bool ARMConstantIslands::reorderThumb2JumpTables() {
  bool MadeChange = false;

  MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
  if (!MJTI)
    return false;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  for (MachineInstr *MI : T2JumpTables) {
    unsigned NumOps = MI->getDesc().getNumOperands();
    unsigned JTOpIdx = NumOps - (MI->isPredicable() ? 2 : 1);
    MachineOperand JTOP = MI->getOperand(JTOpIdx);
    unsigned JTI = JTOP.getIndex();

    // We prefer if target blocks for the jump table come after the jump
    // instruction so we can use TB[BH]. Loop through the target blocks
    // and try to adjust them such that that's true.
    int JTNumber = MI->getParent()->getNumber();
    const std::vector<MachineBasicBlock*> &JTBBs = JT[JTI].MBBs;
    for (MachineBasicBlock *MBB : JTBBs) {
      int DTNumber = MBB->getNumber();
      if (DTNumber < JTNumber) {
        // The destination precedes the switch. Try to move the block forward
        // so we have a positive offset.
        MachineBasicBlock *NewBB =
            adjustJTTargetBlockForward(JTI, MBB, MI->getParent());
        if (NewBB)
          MJTI->ReplaceMBBInJumpTable(JTI, MBB, NewBB);
        MadeChange = true;
      }
    }
  }
  // If the block ends in an unconditional branch, move it. The prior block
  // has to have an analyzable terminator for us to move this one. Be paranoid
  // and make sure we're not trying to move the entry block of the function.
  if (!B && Cond.empty() && BB != &MF->front() &&
      !TII->analyzeBranch(*OldPrior, TBB, FBB, CondPrior)) {
    BB->moveAfter(JTBB);
    OldPrior->updateTerminator(BB);
    MF->RenumberBlocks();

  MF->insert(MBBI, NewBB);

  MF->RenumberBlocks(NewBB);

/// createARMConstantIslandPass - returns an instance of the constpool
/// island pass.
FunctionPass *llvm::createARMConstantIslandPass() {
  return new ARMConstantIslands();
}