#include "llvm/Config/llvm-config.h"

#define DEBUG_TYPE "arm-cp-islands"

#define ARM_CP_ISLANDS_OPT_NAME \
  "ARM constant island placement and branch shortening pass"
STATISTIC(NumSplit,      "Number of uncond branches inserted");
STATISTIC(NumCBrFixed,   "Number of cond branches fixed");
STATISTIC(NumUBrFixed,   "Number of uncond branches fixed");
STATISTIC(NumTBs,        "Number of table branches generated");
STATISTIC(NumT2CPShrunk, "Number of Thumb2 constantpool instructions shrunk");
STATISTIC(NumT2BrShrunk, "Number of Thumb2 immediate branches shrunk");
STATISTIC(NumJTMoved,    "Number of jump table destination blocks moved");
STATISTIC(NumJTInserted, "Number of jump table intermediate blocks inserted");
STATISTIC(NumLEInserted, "Number of LE backwards branches inserted");
static cl::opt<bool>
AdjustJumpTableBlocks("arm-adjust-jump-tables", cl::Hidden, cl::init(true),
          cl::desc("Adjust basic block layout to better use TB[BH]"));

static cl::opt<unsigned>
CPMaxIteration("arm-constant-island-max-iteration", cl::Hidden, cl::init(30),
          cl::desc("The max number of iterations for convergence"));

static cl::opt<bool> SynthesizeThumb1TBB(
    "arm-synthesize-thumb-1-tbb", cl::Hidden, cl::init(true),
    cl::desc("Use compressed jump tables in Thumb-1 by synthesizing an "
             "equivalent to the TBB/TBH instructions"));
  std::unique_ptr<ARMBasicBlockUtils> BBUtils = nullptr;

  /// A sorted list of basic blocks where islands could be placed (i.e.,
  /// blocks that don't fall through to the following block, due to a return,
  /// unreachable, or unconditional branch).
  std::vector<MachineBasicBlock*> WaterList;

  using water_iterator = std::vector<MachineBasicBlock *>::iterator;
    // Set when the alignment (mod 4) of the user's offset is known exactly.
    bool KnownAlignment = false;

    CPUser(MachineInstr *mi, MachineInstr *cpemi, unsigned maxdisp, bool neg,
           bool soimm)
        : MI(mi), CPEMI(cpemi), MaxDisp(maxdisp), NegOk(neg), IsSoImm(soimm) {
      HighWaterMark = CPEMI->getParent();
    }

    /// Returns the maximum displacement supported by MI, corrected for
    /// unknown alignment. Conservatively subtract 2 bytes to handle weird
    /// alignment effects.
    unsigned getMaxDisp() const {
      return (KnownAlignment ? MaxDisp : MaxDisp - 2) - 2;
    }
  };

  /// Keep track of all of the machine instructions that use various constant
  /// pools and their max displacement.
  std::vector<CPUser> CPUsers;
    CPEntry(MachineInstr *cpemi, unsigned cpi, unsigned rc = 0)
        : CPEMI(cpemi), CPI(cpi), RefCount(rc) {}
  };

  /// Keep track of all of the constant pool entry instructions. For each
  /// original constpool index (i.e., those that existed upon entry to this
  /// pass), it keeps a vector of entries. Original elements are cloned as we
  /// go along; the clones are put in the vector of the original element, but
  /// have distinct CPI's.
  std::vector<std::vector<CPEntry>> CPEntries;
    unsigned MaxDisp : 31;

    ImmBranch(MachineInstr *mi, unsigned maxdisp, bool cond, unsigned ubr)
        : MI(mi), MaxDisp(maxdisp), isCond(cond), UncondBr(ubr) {}
  };

  /// Keep track of all the immediate branch instructions.
  std::vector<ImmBranch> ImmBranches;
  bool isPositionIndependentOrROPI;

  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }
  void doInitialConstPlacement(std::vector<MachineInstr *> &CPEMIs);
  void doInitialJumpTablePlacement(std::vector<MachineInstr *> &CPEMIs);
  bool BBHasFallthrough(MachineBasicBlock *MBB);
  CPEntry *findConstPoolEntry(unsigned CPI, const MachineInstr *CPEMI);
  Align getCPEAlign(const MachineInstr *CPEMI);
  void scanFunctionJumpTables();
  void initializeFunctionInfo(const std::vector<MachineInstr*> &CPEMIs);
  MachineBasicBlock *splitBlockBeforeInstr(MachineInstr *MI);
  void updateForInsertedWaterBlock(MachineBasicBlock *NewBB);
  bool decrementCPEReferenceCount(unsigned CPI, MachineInstr *CPEMI);
  unsigned getCombinedIndex(const MachineInstr *CPEMI);
  int findInRangeCPEntry(CPUser &U, unsigned UserOffset);
  bool findAvailableWater(CPUser &U, unsigned UserOffset,
                          water_iterator &WaterIter, bool CloserWater);
  void createNewWater(unsigned CPUserIndex, unsigned UserOffset,
                      MachineBasicBlock *&NewMBB);
  bool handleConstantPoolUser(unsigned CPUserIndex, bool CloserWater);
  void removeDeadCPEMI(MachineInstr *CPEMI);
  bool removeUnusedCPEntries();
  bool isCPEntryInRange(MachineInstr *MI, unsigned UserOffset,
                        MachineInstr *CPEMI, unsigned Disp, bool NegOk,
                        bool DoDump = false);
  bool isWaterInRange(unsigned UserOffset, MachineBasicBlock *Water,
                      CPUser &U, unsigned &Growth);
  bool fixupImmediateBr(ImmBranch &Br);
  bool fixupConditionalBr(ImmBranch &Br);
  bool fixupUnconditionalBr(ImmBranch &Br);
  bool optimizeThumb2Instructions();
  bool optimizeThumb2Branches();
  bool reorderThumb2JumpTables();
  bool preserveBaseRegister(MachineInstr *JumpMI, MachineInstr *LEAMI,
                            unsigned &DeadSize, bool &CanDeleteLEA,
                            bool &BaseRegKill);
  bool optimizeThumb2JumpTables();
  MachineBasicBlock *adjustJTTargetBlockForward(unsigned JTI,
                                                MachineBasicBlock *BB,
                                                MachineBasicBlock *JTBB);

  unsigned getUserOffset(CPUser &) const;
  void dumpBBs();
  void verify();

  bool isOffsetInRange(unsigned UserOffset, unsigned TrialOffset,
                       unsigned Disp, bool NegativeOK, bool IsSoImm = false);
  bool isOffsetInRange(unsigned UserOffset, unsigned TrialOffset,
                       const CPUser &U) {
    return isOffsetInRange(UserOffset, TrialOffset, U.getMaxDisp(), U.NegOk,
                           U.IsSoImm);
  }
char ARMConstantIslands::ID = 0;
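/// Verify the pass's internal bookkeeping: blocks must remain sorted by
/// offset, and every constant pool user must still be in range of its entry.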
void ARMConstantIslands::verify() {
#ifndef NDEBUG
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  assert(is_sorted(*MF, [&BBInfo](const MachineBasicBlock &LHS,
                                  const MachineBasicBlock &RHS) {
    return BBInfo[LHS.getNumber()].postOffset() <
           BBInfo[RHS.getNumber()].postOffset();
  }));
  LLVM_DEBUG(dbgs() << "Verifying " << CPUsers.size() << " CP users.\n");
  for (CPUser &U : CPUsers) {
    unsigned UserOffset = getUserOffset(U);
    // Verify offset using the real max displacement without the safety
    // adjustment.
    if (isCPEntryInRange(U.MI, UserOffset, U.CPEMI, U.getMaxDisp() + 2,
                         U.NegOk, /* DoDump = */ true)) {
      LLVM_DEBUG(dbgs() << "OK\n");
      continue;
    }
    LLVM_DEBUG(dbgs() << "Out of range.\n");
    llvm_unreachable("Constant pool entry out of range!");
  }
#endif
}
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// Print block size and offset information - debugging.
LLVM_DUMP_METHOD void ARMConstantIslands::dumpBBs() {
  LLVM_DEBUG({
    BBInfoVector &BBInfo = BBUtils->getBBInfo();
    for (unsigned J = 0, E = BBInfo.size(); J != E; ++J) {
      const BasicBlockInfo &BBI = BBInfo[J];
      dbgs() << format("%08x %bb.%u\t", BBI.Offset, J)
             << " kb=" << unsigned(BBI.KnownBits)
             << " ua=" << unsigned(BBI.Unalign)
             << " pa=" << Log2(BBI.PostAlign)
             << format(" size=%#x\n", BBInfo[J].Size);
    }
  });
}
#endif
static bool AlignBlocks(MachineFunction *MF, const ARMSubtarget *STI) {
  auto *TLI = STI->getTargetLowering();
  const Align Alignment = TLI->getPrefLoopAlignment();

  bool Changed = false;
  bool PrevCanFallthough = true;
  for (auto &MBB : *MF) {
    if (!PrevCanFallthough) {
      Changed = true;
      MBB.setAlignment(Alignment);
    }

    PrevCanFallthough = MBB.canFallThrough();

    // For LOB's, the ARMLowOverheadLoops pass may remove the unconditional
    // branch later in the pipeline.
    if (STI->hasLOB()) {
      for (const auto &MI : reverse(MBB.terminators())) {
        if (MI.getOpcode() == ARM::t2B &&
            MI.getOperand(0).getMBB() == MBB.getNextNode())
          continue;
        if (isLoopStart(MI) || MI.getOpcode() == ARM::t2LoopEnd ||
            MI.getOpcode() == ARM::t2LoopEndDec) {
          PrevCanFallthough = true;
          break;
        }
        // Any other terminator - nothing to do.
        break;
      }
    }
  }

  return Changed;
}
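/// Pass entry point: place constant pool entries ("islands") near their
/// users and shorten out-of-range branches, iterating until the layout
/// converges.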
bool ARMConstantIslands::runOnMachineFunction(MachineFunction &mf) {
  MF = &mf;
  MCP = mf.getConstantPool();
  BBUtils = std::make_unique<ARMBasicBlockUtils>(mf);

  LLVM_DEBUG(dbgs() << "***** ARMConstantIslands: "
                    << MCP->getConstants().size() << " CP entries, aligned to "
                    << MCP->getConstantPoolAlign().value() << " bytes *****\n");

  STI = &MF->getSubtarget<ARMSubtarget>();
  TII = STI->getInstrInfo();
  isPositionIndependentOrROPI =
      STI->getTargetLowering()->isPositionIndependent() || STI->isROPI();
  AFI = MF->getInfo<ARMFunctionInfo>();
  DT = &getAnalysis<MachineDominatorTreeWrapperPass>().getDomTree();

  isThumb = AFI->isThumbFunction();
  isThumb1 = AFI->isThumb1OnlyFunction();
  isThumb2 = AFI->isThumb2Function();

  bool GenerateTBB = isThumb2 || (isThumb1 && SynthesizeThumb1TBB);
  // TBB generation code in this constant island pass has not been adapted to
  // deal with speculation barriers.
  if (STI->hardenSlsRetBr())
    GenerateTBB = false;

  // Renumber all of the machine basic blocks in the function, guaranteeing
  // that the numbers agree with the position of the block in the function.
  MF->RenumberBlocks();
  DT->updateBlockNumbers();

  // Try to reorder and otherwise adjust the block layout to make good use
  // of the TB[BH] instructions.
  bool MadeChange = false;
  if (GenerateTBB && AdjustJumpTableBlocks) {
    scanFunctionJumpTables();
    MadeChange |= reorderThumb2JumpTables();
    // Data is out of date, so clear it. It'll be recomputed later.
    T2JumpTables.clear();
    // Blocks may have shifted around. Keep the numbering up to date.
    MF->RenumberBlocks();
    DT->updateBlockNumbers();
  }
  // Perform the initial placement of the constant pool entries.
  std::vector<MachineInstr*> CPEMIs;
  if (!MCP->isEmpty())
    doInitialConstPlacement(CPEMIs);

  if (MF->getJumpTableInfo())
    doInitialJumpTablePlacement(CPEMIs);

  // The next UID to take is the first unused one.
  AFI->initPICLabelUId(CPEMIs.size());

  // Scan the function, building up information about the sizes of each
  // block, the location of all the water, and all of the constant pool users.
  initializeFunctionInfo(CPEMIs);
  CPEMIs.clear();

  // Functions with jump tables need an alignment of 4 because they use the
  // ADR instruction, which aligns the PC to 4 bytes before adding an offset.
  if (!T2JumpTables.empty())
    MF->ensureAlignment(Align(4));

  // Remove dead constant pool entries.
  MadeChange |= removeUnusedCPEntries();
  // Iteratively place constant pool entries and fix up branches until there
  // is no change.
  unsigned NoCPIters = 0, NoBRIters = 0;
  while (true) {
    LLVM_DEBUG(dbgs() << "Beginning CP iteration #" << NoCPIters << '\n');
    bool CPChange = false;
    for (unsigned i = 0, e = CPUsers.size(); i != e; ++i)
      // For most inputs this converges in no more than 5 iterations; past
      // the halfway point, start looking for water closer to the user.
      CPChange |= handleConstantPoolUser(i, NoCPIters >= CPMaxIteration / 2);
    if (CPChange && ++NoCPIters > CPMaxIteration)
      report_fatal_error("Constant Island pass failed to converge!");

    // Clear NewWaterList now. If we split a block for branches, it should
    // appear as "new water" for the next iteration of constant pool
    // placement.
    NewWaterList.clear();

    LLVM_DEBUG(dbgs() << "Beginning BR iteration #" << NoBRIters << '\n');
    bool BRChange = false;
    for (unsigned i = 0, e = ImmBranches.size(); i != e; ++i)
      BRChange |= fixupImmediateBr(ImmBranches[i]);
    if (BRChange && ++NoBRIters > 30)
      report_fatal_error("Branch Fix Up pass failed to converge!");

    if (!CPChange && !BRChange)
      break;
    MadeChange = true;
  }

  // Shrink 32-bit Thumb2 load and store instructions.
  if (isThumb2 && !STI->prefers32BitThumb())
    MadeChange |= optimizeThumb2Instructions();
  // Shrink 32-bit branch instructions.
  if (isThumb && STI->hasV8MBaselineOps())
    MadeChange |= optimizeThumb2Branches();

  // Optimize jump tables using TBB / TBH.
  if (GenerateTBB && !STI->genExecuteOnly())
    MadeChange |= optimizeThumb2JumpTables();
  // Save the mapping between original and cloned constpool entries.
  for (unsigned i = 0, e = CPEntries.size(); i != e; ++i) {
    for (unsigned j = 0, je = CPEntries[i].size(); j != je; ++j) {
      const CPEntry &CPE = CPEntries[i][j];
      if (CPE.CPEMI && CPE.CPEMI->getOperand(1).isCPI())
        AFI->recordCPEClone(i, CPE.CPI);
    }
  }

  BBUtils->clear();
  WaterList.clear();
  CPUsers.clear();
  CPEntries.clear();
  JumpTableEntryIndices.clear();
  JumpTableUserIndices.clear();
  BlockJumpTableRefCount.clear();
  ImmBranches.clear();
  PushPopMIs.clear();
  T2JumpTables.clear();

  return MadeChange;
}
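/// Perform the initial placement of the regular constant pool entries. To
/// start with, we put them all at the end of the function.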
void
ARMConstantIslands::doInitialConstPlacement(std::vector<MachineInstr*> &CPEMIs) {
  // Create the basic block to hold the CPE's.
  MachineBasicBlock *BB = MF->CreateMachineBasicBlock();
  MF->push_back(BB);

  // MachineConstantPool measures alignment in bytes.
  const Align MaxAlign = MCP->getConstantPoolAlign();
  const unsigned MaxLogAlign = Log2(MaxAlign);

  // Mark the basic block as required by the const-pool.
  BB->setAlignment(MaxAlign);

  // The function needs to be as aligned as the basic blocks. The linker may
  // move functions around based on their alignment; halfword literals still
  // need word alignment on the function.
  Align FuncAlign = MaxAlign;
  if (MaxAlign == 2)
    FuncAlign = Align(4);
  MF->ensureAlignment(FuncAlign);
  const std::vector<MachineConstantPoolEntry> &CPs = MCP->getConstants();

  const DataLayout &TD = MF->getDataLayout();
  for (unsigned i = 0, e = CPs.size(); i != e; ++i) {
    unsigned Size = CPs[i].getSizeInBytes(TD);
    Align Alignment = CPs[i].getAlign();

    // Insert CONSTPOOL_ENTRY before entries with a smaller alignment.
    unsigned LogAlign = Log2(Alignment);
    MachineBasicBlock::iterator InsAt = InsPoint[LogAlign];
    MachineInstr *CPEMI =
        BuildMI(*BB, InsAt, DebugLoc(), TII->get(ARM::CONSTPOOL_ENTRY))
            .addImm(i).addConstantPoolIndex(i).addImm(Size);
    CPEMIs.push_back(CPEMI);

    // Ensure that future entries with higher alignment get inserted before
    // CPEMI. This is bucket sort with iterators.
    for (unsigned a = LogAlign + 1; a <= MaxLogAlign; ++a)
      if (InsPoint[a] == InsAt)
        InsPoint[a] = CPEMI;

    // Add a new CPEntry, but no corresponding CPUser yet.
    CPEntries.emplace_back(1, CPEntry(CPEMI, i));
    LLVM_DEBUG(dbgs() << "Moved CPI#" << i << " to end of function, size = "
                      << Size << ", align = " << Alignment.value() << '\n');
  }
}
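/// Place jump tables directly following the jump instruction that uses them,
/// unlike regular constants, which all start out at the end of the function.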
void ARMConstantIslands::doInitialJumpTablePlacement(
    std::vector<MachineInstr *> &CPEMIs) {
  unsigned i = CPEntries.size();
  auto MJTI = MF->getJumpTableInfo();
  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();

  MachineBasicBlock *LastCorrectlyNumberedBB = nullptr;
  for (MachineBasicBlock &MBB : *MF) {
    auto MI = MBB.getLastNonDebugInstr();
    if (MI == MBB.end())
      continue;

    unsigned JTOpcode;
    switch (MI->getOpcode()) {
    default:
      continue;
    case ARM::BR_JTadd:
    case ARM::BR_JTr:
    case ARM::tBR_JTr:
    case ARM::BR_JTm_i12:
    case ARM::BR_JTm_rs:
      // These opcodes are emitted only in ARM or Thumb1 modes, which do not
      // support PACBTI.
      assert(!MF->getInfo<ARMFunctionInfo>()->branchTargetEnforcement() &&
             "Branch protection must not be enabled for Arm or Thumb1 modes");
      JTOpcode = ARM::JUMPTABLE_ADDRS;
      break;
    case ARM::t2BR_JT:
      JTOpcode = ARM::JUMPTABLE_INSTS;
      break;
    case ARM::tTBB_JT:
    case ARM::t2TBB_JT:
      JTOpcode = ARM::JUMPTABLE_TBB;
      break;
    case ARM::tTBH_JT:
    case ARM::t2TBH_JT:
      JTOpcode = ARM::JUMPTABLE_TBH;
      break;
    }

    unsigned NumOps = MI->getDesc().getNumOperands();
    MachineOperand JTOp =
        MI->getOperand(NumOps - (MI->isPredicable() ? 2 : 1));
    unsigned JTI = JTOp.getIndex();
    unsigned Size = JT[JTI].MBBs.size() * sizeof(uint32_t);
    MachineBasicBlock *JumpTableBB = MF->CreateMachineBasicBlock();
    MF->insert(std::next(MachineFunction::iterator(MBB)), JumpTableBB);
    MachineInstr *CPEMI = BuildMI(*JumpTableBB, JumpTableBB->begin(),
                                  DebugLoc(), TII->get(JTOpcode))
                              .addImm(i++)
                              .addJumpTableIndex(JTI)
                              .addImm(Size);
    CPEMIs.push_back(CPEMI);
    CPEntries.emplace_back(1, CPEntry(CPEMI, JTI));
    JumpTableEntryIndices.insert(std::make_pair(JTI, CPEntries.size() - 1));
    if (!LastCorrectlyNumberedBB)
      LastCorrectlyNumberedBB = &MBB;
  }
  if (LastCorrectlyNumberedBB) {
    MF->RenumberBlocks(LastCorrectlyNumberedBB);
    DT->updateBlockNumbers();
  }
}
/// BBHasFallthrough - Return true if the specified basic block can fallthrough
/// into the block immediately after it.
bool ARMConstantIslands::BBHasFallthrough(MachineBasicBlock *MBB) {
  MachineFunction::iterator MBBI = MBB->getIterator();
  if (std::next(MBBI) == MBB->getParent()->end() ||
      !MBB->isSuccessor(&*std::next(MBBI)))
    return false;

  // A potential fallthrough may already have an unconditional branch for
  // whatever reason.
  MachineBasicBlock *TBB, *FBB;
  SmallVector<MachineOperand, 4> Cond;
  bool TooDifficult = TII->analyzeBranch(*MBB, TBB, FBB, Cond);
  return TooDifficult || FBB == nullptr;
}
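/// Given the constpool index and CONSTPOOL_ENTRY MI, look up the
/// corresponding CPEntry.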
ARMConstantIslands::CPEntry *
ARMConstantIslands::findConstPoolEntry(unsigned CPI,
                                       const MachineInstr *CPEMI) {
  std::vector<CPEntry> &CPEs = CPEntries[CPI];
  // Number of entries per constpool index should be small, just do a
  // linear search.
  for (CPEntry &CPE : CPEs)
    if (CPE.CPEMI == CPEMI)
      return &CPE;
  return nullptr;
}
/// Returns the required alignment of the constant pool entry represented by
/// CPEMI.
Align ARMConstantIslands::getCPEAlign(const MachineInstr *CPEMI) {
  switch (CPEMI->getOpcode()) {
  case ARM::CONSTPOOL_ENTRY:
    break;
  case ARM::JUMPTABLE_TBB:
    return isThumb1 ? Align(4) : Align(1);
  case ARM::JUMPTABLE_TBH:
    return isThumb1 ? Align(4) : Align(2);
  case ARM::JUMPTABLE_INSTS:
    return Align(2);
  case ARM::JUMPTABLE_ADDRS:
    return Align(4);
  default:
    llvm_unreachable("unknown constpool entry kind");
  }

  unsigned CPI = getCombinedIndex(CPEMI);
  assert(CPI < MCP->getConstants().size() && "Invalid constant pool index.");
  return MCP->getConstants()[CPI].getAlign();
}
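/// Do a scan of the function, recording the locations of all the jump tables
/// and, when branch target enforcement is enabled, how often each block is
/// referenced from a jump table.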
void ARMConstantIslands::scanFunctionJumpTables() {
  for (MachineBasicBlock &MBB : *MF) {
    for (MachineInstr &I : MBB)
      if (I.isBranch() &&
          (I.getOpcode() == ARM::t2BR_JT || I.getOpcode() == ARM::tBR_JTr))
        T2JumpTables.push_back(&I);
  }

  if (!MF->getInfo<ARMFunctionInfo>()->branchTargetEnforcement())
    return;

  if (const MachineJumpTableInfo *JTI = MF->getJumpTableInfo())
    for (const MachineJumpTableEntry &JTE : JTI->getJumpTables())
      for (const MachineBasicBlock *MBB : JTE.MBBs) {
        // Address-taken blocks may be targeted from anywhere; pin their
        // refcount at the maximum.
        if (MBB->hasAddressTaken())
          BlockJumpTableRefCount[MBB] = std::numeric_limits<int>::max();
        else
          ++BlockJumpTableRefCount[MBB];
      }
}
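/// Do the initial scan of the function, building up information about the
/// sizes of each block, the location of all the water, and finding all of
/// the constant pool users.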
void ARMConstantIslands::
initializeFunctionInfo(const std::vector<MachineInstr*> &CPEMIs) {
  BBUtils->computeAllBlockSizes();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();

  // The known bits of the entry block offset are determined by the function
  // alignment.
  BBInfo.front().KnownBits = Log2(MF->getAlignment());

  // Compute block offsets and known bits.
  BBUtils->adjustBBOffsetsAfter(&MF->front());

  // We only care about jump tables if there are jump tables inline.
  MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
  bool InlineJumpTables =
      MJTI && MJTI->getEntryKind() == MachineJumpTableInfo::EK_Inline;

  // Now go back through the instructions and build up our data structures.
  for (MachineBasicBlock &MBB : *MF) {
    // If this block doesn't fall through into the next MBB, then this is
    // 'water' that a constant pool island could be placed.
    if (!BBHasFallthrough(&MBB))
      WaterList.push_back(&MBB);

    for (MachineInstr &I : MBB) {
      if (I.isDebugInstr())
        continue;

      unsigned Opc = I.getOpcode();
      if (I.isBranch()) {
        bool isCond = false;
        unsigned Bits = 0;
        unsigned Scale = 1;
        int UOpc = Opc;
        switch (Opc) {
        default: continue;             // Ignore other branches.
        case ARM::t2BR_JT:
        case ARM::tBR_JTr:
          if (InlineJumpTables)
            T2JumpTables.push_back(&I);
          continue;                    // Does not get an entry in ImmBranches.
        case ARM::Bcc:   isCond = true; UOpc = ARM::B; [[fallthrough]];
        case ARM::B:     Bits = 24; Scale = 4; break;
        case ARM::tBcc:  isCond = true; UOpc = ARM::tB; Bits = 8; Scale = 2; break;
        case ARM::tB:    Bits = 11; Scale = 2; break;
        case ARM::t2Bcc: isCond = true; UOpc = ARM::t2B; Bits = 20; Scale = 2; break;
        case ARM::t2B:   Bits = 24; Scale = 2; break;
        }

        // Record this immediate branch.
        unsigned MaxOffs = ((1 << (Bits-1))-1) * Scale;
        ImmBranches.push_back(ImmBranch(&I, MaxOffs, isCond, UOpc));
      }

      if (Opc == ARM::tPUSH || Opc == ARM::tPOP_RET)
        PushPopMIs.push_back(&I);

      if (Opc == ARM::CONSTPOOL_ENTRY || Opc == ARM::JUMPTABLE_ADDRS ||
          Opc == ARM::JUMPTABLE_INSTS || Opc == ARM::JUMPTABLE_TBB ||
          Opc == ARM::JUMPTABLE_TBH)
        continue;
      // Scan the instructions for constant pool operands.
      for (unsigned op = 0, e = I.getNumOperands(); op != e; ++op)
        if (I.getOperand(op).isCPI() ||
            (I.getOperand(op).isJTI() && InlineJumpTables)) {
          // We found one. The addressing mode tells us the max displacement
          // from the PC that this instruction permits.

          // Basic size info comes from the TSFlags field.
          unsigned Bits = 0;
          unsigned Scale = 1;
          bool NegOk = false;
          bool IsSoImm = false;
          switch (Opc) {
          default:
            llvm_unreachable("Unknown addressing mode for CP reference!");

          // Taking the address of a CP entry.
          case ARM::LEApcrel:
          case ARM::LEApcrelJT: {
            // This takes a SoImm (an 8-bit immediate rotated); pretend the
            // maximum offset is 255 * 4.
            Bits = 8;
            NegOk = true;
            IsSoImm = true;
            unsigned CPI = I.getOperand(op).getIndex();
            assert(CPI < CPEMIs.size());
            MachineInstr *CPEMI = CPEMIs[CPI];
            const Align CPEAlign = getCPEAlign(CPEMI);
            const unsigned LogCPEAlign = Log2(CPEAlign);
            if (LogCPEAlign >= 2)
              Scale = 4;
            else
              // For constants with less than 4-byte alignment, pretend the
              // maximum offset is 255 * 1 for PC-relative addressing.
              Scale = 1;
            break;
          }
          case ARM::t2LEApcrel:
          case ARM::t2LEApcrelJT:
            Bits = 12; NegOk = true;
            break;
          case ARM::tLEApcrel:
          case ARM::tLEApcrelJT:
            Bits = 8; Scale = 4;
            break;
          case ARM::t2LDRpci:
          case ARM::t2LDRHpci:
          case ARM::t2LDRSHpci:
          case ARM::t2LDRBpci:
          case ARM::t2LDRSBpci:
            Bits = 12; NegOk = true; // +-offset_12
            break;
          }
          // Remember that this is a user of a CP entry.
          unsigned CPI = I.getOperand(op).getIndex();
          if (I.getOperand(op).isJTI()) {
            JumpTableUserIndices.insert(std::make_pair(CPI, CPUsers.size()));
            CPI = JumpTableEntryIndices[CPI];
          }
          MachineInstr *CPEMI = CPEMIs[CPI];
          unsigned MaxOffs = ((1 << Bits)-1) * Scale;
          CPUsers.push_back(CPUser(&I, CPEMI, MaxOffs, NegOk, IsSoImm));

          // Increment corresponding CPEntry reference count.
          CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);
          assert(CPE && "Cannot find a corresponding CPEntry!");
          CPE->RefCount++;

          // Instructions can only use one CP entry; don't bother scanning
          // the rest of the operands.
          break;
        }
    }
  }
}
/// CompareMBBNumbers - Little predicate function to sort the WaterList by MBB
/// ID.
static bool CompareMBBNumbers(const MachineBasicBlock *LHS,
                              const MachineBasicBlock *RHS) {
  return LHS->getNumber() < RHS->getNumber();
}

void ARMConstantIslands::updateForInsertedWaterBlock(MachineBasicBlock *NewBB) {
  // Renumber the MBBs to keep them consecutive.
  MF->RenumberBlocks(NewBB);
  DT->updateBlockNumbers();

  // Add NewBB to WaterList, keeping it sorted by MBB number.
  water_iterator IP = llvm::lower_bound(WaterList, NewBB, CompareMBBNumbers);
  WaterList.insert(IP, NewBB);
}
MachineBasicBlock *ARMConstantIslands::splitBlockBeforeInstr(MachineInstr *MI) {
  MachineBasicBlock *OrigBB = MI->getParent();

  // Collect liveness information at MI.
  LivePhysRegs LRs(*MF->getSubtarget().getRegisterInfo());
  LRs.addLiveOuts(*OrigBB);
  auto LivenessEnd = ++MachineBasicBlock::iterator(MI).getReverse();
  for (MachineInstr &LiveMI : make_range(OrigBB->rbegin(), LivenessEnd))
    LRs.stepBackward(LiveMI);

  // Create a new MBB for the code after OrigBB, and insert it right after.
  MachineBasicBlock *NewBB =
      MF->CreateMachineBasicBlock(OrigBB->getBasicBlock());
  MachineFunction::iterator MBBI = ++OrigBB->getIterator();
  MF->insert(MBBI, NewBB);
  // Add an unconditional branch from OrigBB to NewBB.
  // Note the new unconditional branch is not being recorded.
  unsigned Opc = isThumb ? (isThumb2 ? ARM::t2B : ARM::tB) : ARM::B;
  if (!isThumb)
    BuildMI(OrigBB, DebugLoc(), TII->get(Opc)).addMBB(NewBB);
  else
    BuildMI(OrigBB, DebugLoc(), TII->get(Opc))
        .addMBB(NewBB)
        .add(predOps(ARMCC::AL));
  ++NumSplit;

  // Update live-in information in the new block.
  MachineRegisterInfo &MRI = MF->getRegInfo();
  for (MCPhysReg L : LRs)
    if (!MRI.isReserved(L))
      NewBB->addLiveIn(L);

  // Update internal data structures to account for the newly inserted MBB.
  MF->RenumberBlocks(NewBB);
  DT->updateBlockNumbers();
  // OrigBB may no longer fall through into NewBB; if so, it becomes new
  // water. Keep WaterList sorted while inserting.
  if (!BBHasFallthrough(OrigBB)) {
    water_iterator IP =
        llvm::lower_bound(WaterList, OrigBB, CompareMBBNumbers);
    MachineBasicBlock *WaterBB = *IP;
    if (WaterBB == OrigBB)
      WaterList.insert(std::next(IP), NewBB);
    else
      WaterList.insert(IP, OrigBB);
    NewWaterList.insert(OrigBB);
  }
  // Figure out how large OrigBB and NewBB are; the sizes include the new
  // unconditional branch. All offsets following these blocks must be
  // adjusted.
  BBUtils->computeBlockSize(OrigBB);
  BBUtils->computeBlockSize(NewBB);
  BBUtils->adjustBBOffsetsAfter(OrigBB);

  return NewBB;
}
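/// Compute the offset of U.MI as seen by the hardware displacement
/// computation.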
unsigned ARMConstantIslands::getUserOffset(CPUser &U) const {
  unsigned UserOffset = BBUtils->getOffsetOf(U.MI);

  SmallVectorImpl<BasicBlockInfo> &BBInfo = BBUtils->getBBInfo();
  const BasicBlockInfo &BBI = BBInfo[U.MI->getParent()->getNumber()];
  unsigned KnownBits = BBI.internalKnownBits();

  // The value read from PC is offset from the actual instruction address.
  UserOffset += (isThumb ? 4 : 8);

  // Because of inline assembly, we may not know the alignment (mod 4) of
  // U.MI. Make sure U.getMaxDisp() returns a constrained range.
  U.KnownAlignment = (KnownBits >= 2);

  // On Thumb, offsets == 2 mod 4 are rounded down by the hardware for
  // purposes of the displacement computation; compensate for that here.
  if (isThumb && U.KnownAlignment)
    UserOffset &= ~3u;

  return UserOffset;
}
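/// Checks whether UserOffset (the location of a constant pool reference) is
/// within MaxDisp of TrialOffset (a proposed location of a constant pool
/// entry).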
bool ARMConstantIslands::isOffsetInRange(unsigned UserOffset,
                                         unsigned TrialOffset,
                                         unsigned MaxDisp,
                                         bool NegativeOK, bool IsSoImm) {
  if (UserOffset <= TrialOffset) {
    // User before the Trial.
    if (TrialOffset - UserOffset <= MaxDisp)
      return true;
  } else if (NegativeOK) {
    if (UserOffset - TrialOffset <= MaxDisp)
      return true;
  }
  return false;
}
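/// Returns true if a CPE placed after the specified Water (a basic block)
/// will be in range for the specific MI. Also sets Growth to the amount the
/// function would grow by inserting the CPE there.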
bool ARMConstantIslands::isWaterInRange(unsigned UserOffset,
                                        MachineBasicBlock *Water, CPUser &U,
                                        unsigned &Growth) {
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  const Align CPEAlign = getCPEAlign(U.CPEMI);
  const unsigned CPEOffset = BBInfo[Water->getNumber()].postOffset(CPEAlign);
  unsigned NextBlockOffset;
  Align NextBlockAlignment;
  MachineFunction::const_iterator NextBlock = Water->getIterator();
  if (++NextBlock == MF->end()) {
    NextBlockOffset = BBInfo[Water->getNumber()].postOffset();
  } else {
    NextBlockOffset = BBInfo[NextBlock->getNumber()].Offset;
    NextBlockAlignment = NextBlock->getAlignment();
  }
  unsigned Size = U.CPEMI->getOperand(2).getImm();
  unsigned CPEEnd = CPEOffset + Size;

  // The CPE may be able to hide in the alignment padding before the next
  // block. It may also cause more padding to be required if it is more
  // aligned than the next block.
  if (CPEEnd > NextBlockOffset) {
    Growth = CPEEnd - NextBlockOffset;
    // Compute the padding that would go at the end of the CPE to align the
    // next block.
    Growth += offsetToAlignment(CPEEnd, NextBlockAlignment);

    // If the CPE is to be inserted before the instruction, that will raise
    // the offset of the instruction. Also account for unknown alignment
    // padding in blocks between CPE and the user.
    if (CPEOffset < UserOffset)
      UserOffset += Growth + UnknownPadding(MF->getAlignment(), Log2(CPEAlign));
  } else
    // CPE fits in existing padding.
    Growth = 0;

  return isOffsetInRange(UserOffset, CPEOffset, U);
}
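/// Returns true if the distance between the specified MI and the specified
/// constant pool entry instruction fits in MI's displacement field.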
bool ARMConstantIslands::isCPEntryInRange(MachineInstr *MI, unsigned UserOffset,
                                          MachineInstr *CPEMI, unsigned MaxDisp,
                                          bool NegOk, bool DoDump) {
  unsigned CPEOffset = BBUtils->getOffsetOf(CPEMI);

  if (DoDump) {
    LLVM_DEBUG({
      BBInfoVector &BBInfo = BBUtils->getBBInfo();
      unsigned Block = MI->getParent()->getNumber();
      const BasicBlockInfo &BBI = BBInfo[Block];
      dbgs() << "User of CPE#" << CPEMI->getOperand(0).getImm()
             << " max delta=" << MaxDisp
             << format(" insn address=%#x", UserOffset) << " in "
             << printMBBReference(*MI->getParent()) << ": "
             << format("%#x-%x\t", BBI.Offset, BBI.postOffset()) << *MI
             << format("CPE address=%#x offset=%+d: ", CPEOffset,
                       int(CPEOffset - UserOffset));
    });
  }

  return isOffsetInRange(UserOffset, CPEOffset, MaxDisp, NegOk);
}
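/// Find the constant pool entry with index CPI and instruction CPEMI, and
/// decrement its refcount. If the refcount becomes 0, remove the entry and
/// instruction. Returns true if we removed the entry, false if we didn't.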
bool ARMConstantIslands::decrementCPEReferenceCount(unsigned CPI,
                                                    MachineInstr *CPEMI) {
  // Find the old entry. Eliminate it if it is no longer used.
  CPEntry *CPE = findConstPoolEntry(CPI, CPEMI);
  assert(CPE && "Unexpected!");
  if (--CPE->RefCount == 0) {
    removeDeadCPEMI(CPEMI);
    CPE->CPEMI = nullptr;
    return true;
  }
  return false;
}

unsigned ARMConstantIslands::getCombinedIndex(const MachineInstr *CPEMI) {
  if (CPEMI->getOperand(1).isCPI())
    return (unsigned)CPEMI->getOperand(1).getIndex();

  return JumpTableEntryIndices[CPEMI->getOperand(1).getIndex()];
}
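/// See if the currently referenced CPE is in range; if not, look for an
/// in-range clone and rewrite the user to reference it. Returns 0 if no
/// usable entry was found, 1 if the user was handled with no code changes,
/// and 2 if code insertions or deletions were also required.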
int ARMConstantIslands::findInRangeCPEntry(CPUser& U, unsigned UserOffset) {
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI  = U.CPEMI;

  // Check to see if the CPE is already in-range.
  if (isCPEntryInRange(UserMI, UserOffset, CPEMI, U.getMaxDisp(), U.NegOk,
                       true)) {
    LLVM_DEBUG(dbgs() << "In range\n");
    return 1;
  }

  // No. Look for previously created clones of the CPE that are in range.
  unsigned CPI = getCombinedIndex(CPEMI);
  std::vector<CPEntry> &CPEs = CPEntries[CPI];
  for (CPEntry &CPE : CPEs) {
    // We already tried this one.
    if (CPE.CPEMI == CPEMI)
      continue;
    // Removing CPEs can leave empty entries, skip.
    if (CPE.CPEMI == nullptr)
      continue;
    if (isCPEntryInRange(UserMI, UserOffset, CPE.CPEMI, U.getMaxDisp(),
                         U.NegOk)) {
      LLVM_DEBUG(dbgs() << "Replacing CPE#" << CPI << " with CPE#" << CPE.CPI
                        << "\n");
      // Point the CPUser node to the replacement.
      U.CPEMI = CPE.CPEMI;
      // Change the CPI in the instruction operand to refer to the clone.
      for (MachineOperand &MO : UserMI->operands())
        if (MO.isCPI()) {
          MO.setIndex(CPE.CPI);
          break;
        }
      // Adjust the refcount of the clone...
      CPE.RefCount++;
      // ...and the original. If we didn't remove the old entry, none of the
      // addresses changed, so we don't need another pass.
      return decrementCPEReferenceCount(CPI, CPEMI) ? 2 : 1;
    }
  }
  return 0;
}
/// getUnconditionalBrDisp - Returns the maximum displacement that can fit in
/// the specific unconditional branch instruction.
static unsigned getUnconditionalBrDisp(int Opc) {
  switch (Opc) {
  case ARM::tB:
    return ((1<<10)-1)*2;
  case ARM::t2B:
    return ((1<<23)-1)*2;
  default:
    break;
  }
  return ((1<<23)-1)*4;
}
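/// Look for an existing entry in the WaterList in which we can place the CPE
/// referenced from U so it's within range of U's MI. To keep the algorithm
/// terminating, a CPE is only allowed to move to a lower address, so the
/// list is searched backwards; the best (least-growth) candidate is returned
/// via WaterIter.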
bool ARMConstantIslands::findAvailableWater(CPUser &U, unsigned UserOffset,
                                            water_iterator &WaterIter,
                                            bool CloserWater) {
  if (WaterList.empty())
    return false;

  unsigned BestGrowth = ~0u;
  // The nearest water without splitting the UserBB is right after it. If the
  // distance is still within half the limit, the water is not worth
  // splitting for.
  MachineBasicBlock *UserBB = U.MI->getParent();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  const Align CPEAlign = getCPEAlign(U.CPEMI);
  unsigned MinNoSplitDisp = BBInfo[UserBB->getNumber()].postOffset(CPEAlign);
  if (CloserWater && MinNoSplitDisp > U.getMaxDisp() / 2)
    return false;
  for (water_iterator IP = std::prev(WaterList.end()), B = WaterList.begin();;
       --IP) {
    MachineBasicBlock *WaterBB = *IP;
    // Check if water is in range and is either at a lower address than the
    // current "high water mark" or a new water block that was created since
    // the previous iteration by inserting an unconditional branch. In the
    // latter case, we want to allow resetting the high water mark back to
    // this new water since we haven't seen it before.
    unsigned Growth;
    if (isWaterInRange(UserOffset, WaterBB, U, Growth) &&
        (WaterBB->getNumber() < U.HighWaterMark->getNumber() ||
         NewWaterList.count(WaterBB) || WaterBB == U.MI->getParent()) &&
        Growth < BestGrowth) {
      // This is the least amount of required padding seen so far.
      BestGrowth = Growth;
      WaterIter = IP;
      LLVM_DEBUG(dbgs() << "Found water after " << printMBBReference(*WaterBB)
                        << " Growth=" << Growth << '\n');

      if (CloserWater && WaterBB == U.MI->getParent())
        return true;
      // Keep looking unless it is perfect and we're not looking for the
      // lowest possible address.
      if (!CloserWater && BestGrowth == 0)
        return true;
    }
    if (IP == B)
      break;
  }
  return BestGrowth != ~0u;
}
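/// No existing WaterList entry will work for CPUsers[CPUserIndex], so create
/// a place to put the CPE: the end of the user's block if it is in range, or
/// a hole split out of the middle of the block otherwise. NewMBB is set to
/// the block following which the new island can be inserted.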
void ARMConstantIslands::createNewWater(unsigned CPUserIndex,
                                        unsigned UserOffset,
                                        MachineBasicBlock *&NewMBB) {
  CPUser &U = CPUsers[CPUserIndex];
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI  = U.CPEMI;
  const Align CPEAlign = getCPEAlign(CPEMI);
  MachineBasicBlock *UserMBB = UserMI->getParent();
  BBInfoVector &BBInfo = BBUtils->getBBInfo();
  const BasicBlockInfo &UserBBI = BBInfo[UserMBB->getNumber()];

  // If the block does not end in an unconditional branch already, and if the
  // end of the block is within range, make new water there.
  if (BBHasFallthrough(UserMBB)) {
    // Size of branch to insert.
    unsigned Delta = isThumb1 ? 2 : 4;
    // Compute the offset where the CPE will begin.
    unsigned CPEOffset = UserBBI.postOffset(CPEAlign) + Delta;

    if (isOffsetInRange(UserOffset, CPEOffset, U)) {
      LLVM_DEBUG(dbgs() << "Split at end of " << printMBBReference(*UserMBB)
                        << format(", expected CPE offset %#x\n", CPEOffset));
      NewMBB = &*++UserMBB->getIterator();
      // Add an unconditional branch from UserMBB to the fallthrough block.
      // Record it for branch lengthening; this new branch will not get out
      // of range, but if the preceding conditional branch is out of range,
      // the targets will be exchanged, and the altered branch may be out of
      // range, so the machinery has to know about it.
      int UncondBr = isThumb ? ((isThumb2) ? ARM::t2B : ARM::tB) : ARM::B;
      if (!isThumb)
        BuildMI(UserMBB, DebugLoc(), TII->get(UncondBr)).addMBB(NewMBB);
      else
        BuildMI(UserMBB, DebugLoc(), TII->get(UncondBr))
            .addMBB(NewMBB)
            .add(predOps(ARMCC::AL));
      unsigned MaxDisp = getUnconditionalBrDisp(UncondBr);
      ImmBranches.push_back(ImmBranch(&UserMBB->back(),
                                      MaxDisp, false, UncondBr));
      BBUtils->computeBlockSize(UserMBB);
      BBUtils->adjustBBOffsetsAfter(UserMBB);
      return;
    }
  }
  // What a big block. Find a place within the block to split it.
  const Align Align = MF->getAlignment();
  assert(Align >= CPEAlign && "Over-aligned constant pool entry");
  unsigned KnownBits = UserBBI.internalKnownBits();
  unsigned UPad = UnknownPadding(Align, KnownBits);
  unsigned BaseInsertOffset = UserOffset + U.getMaxDisp() - UPad;

  // The 4 in the following is for the unconditional branch we'll be
  // inserting (allows for long branch on Thumb1). Alignment of the island is
  // handled inside isOffsetInRange.
  BaseInsertOffset -= 4;

  LLVM_DEBUG(dbgs() << format("Split in middle of big block before %#x",
                              BaseInsertOffset)
                    << " la=" << Log2(Align) << " kb=" << KnownBits
                    << " up=" << UPad << '\n');

  // This could point off the end of the block if we've already got constant
  // pool entries following this block; only the last one is in the water
  // list. Back past any possible branches (allow for a conditional and a
  // maximally long unconditional).
  if (BaseInsertOffset + 8 >= UserBBI.postOffset()) {
    // Ensure BaseInsertOffset is larger than the offset of the instruction
    // following UserMI so that the loop which searches for the split point
    // iterates at least once.
    BaseInsertOffset =
        std::max(UserBBI.postOffset() - UPad - 8,
                 UserOffset + TII->getInstSizeInBytes(*UserMI) + 1);
    // The naive algorithm may split an IT block; walk forward past any
    // predicated instructions to find a safe split point.
    MachineBasicBlock::iterator I = UserMI;
    ++I;
    Register PredReg;
    for (unsigned Offset = UserOffset + TII->getInstSizeInBytes(*UserMI);
         I->getOpcode() != ARM::t2IT &&
         getITInstrPredicate(*I, PredReg) != ARMCC::AL;
         Offset += TII->getInstSizeInBytes(*I), I = std::next(I)) {
      BaseInsertOffset =
          std::max(BaseInsertOffset, Offset + TII->getInstSizeInBytes(*I) + 1);
      assert(I != UserMBB->end() && "Fell off end of block");
    }
    LLVM_DEBUG(dbgs() << format("Move inside block: %#x\n", BaseInsertOffset));
  }

  unsigned EndInsertOffset = BaseInsertOffset + 4 + UPad +
                             CPEMI->getOperand(2).getImm();
  MachineBasicBlock::iterator MI = UserMI;
  ++MI;
  unsigned CPUIndex = CPUserIndex+1;
  unsigned NumCPUsers = CPUsers.size();
  MachineInstr *LastIT = nullptr;
  for (unsigned Offset = UserOffset + TII->getInstSizeInBytes(*UserMI);
       Offset < BaseInsertOffset;
       Offset += TII->getInstSizeInBytes(*MI), MI = std::next(MI)) {
    assert(MI != UserMBB->end() && "Fell off end of block");
    if (CPUIndex < NumCPUsers && CPUsers[CPUIndex].MI == &*MI) {
      CPUser &U = CPUsers[CPUIndex];
      if (!isOffsetInRange(Offset, EndInsertOffset, U)) {
        // Shift insertion point by one unit of alignment so it is within
        // reach.
        BaseInsertOffset -= Align.value();
        EndInsertOffset -= Align.value();
      }
      // This is overly conservative, as we don't account for CPEMIs being
      // reused within the block, but it doesn't matter much. Also assume
      // CPEs are added in order with alignment padding.
      EndInsertOffset += U.CPEMI->getOperand(2).getImm();
      CPUIndex++;
    }

    // Remember the last IT instruction.
    if (MI->getOpcode() == ARM::t2IT)
      LastIT = &*MI;
  }

  --MI;

  // Avoid splitting an IT block.
  if (LastIT) {
    Register PredReg;
    ARMCC::CondCodes CC = getITInstrPredicate(*MI, PredReg);
    if (CC != ARMCC::AL)
      MI = LastIT;
  }

  // Avoid splitting a MOVW+MOVT pair with a relocation on Windows. On
  // Windows, this instruction pair is covered by one single
  // IMAGE_REL_ARM_MOV32T relocation which covers both instructions. If a
  // constant island is injected inbetween them, the relocation will clobber
  // the instruction and fail to update the MOVT instruction.
  if (STI->isTargetWindows() && isThumb && MI->getOpcode() == ARM::t2MOVTi16 &&
      (MI->getOperand(2).getTargetFlags() & ARMII::MO_OPTION_MASK) ==
          ARMII::MO_HI16) {
    --MI;
    assert(MI->getOpcode() == ARM::t2MOVi16 &&
           (MI->getOperand(1).getTargetFlags() & ARMII::MO_OPTION_MASK) ==
               ARMII::MO_LO16);
  }

  NewMBB = splitBlockBeforeInstr(&*MI);
}
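/// Analyze the specified user, checking to see if it is out-of-range. If so,
/// pick up the constant pool value and move it some place in-range: either a
/// known water block, or a newly created one. Returns true if we changed any
/// addresses, false otherwise.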
bool ARMConstantIslands::handleConstantPoolUser(unsigned CPUserIndex,
                                                bool CloserWater) {
  CPUser &U = CPUsers[CPUserIndex];
  MachineInstr *UserMI = U.MI;
  MachineInstr *CPEMI  = U.CPEMI;
  unsigned CPI = getCombinedIndex(CPEMI);
  unsigned Size = CPEMI->getOperand(2).getImm();
  // Compute this only once, it's expensive.
  unsigned UserOffset = getUserOffset(U);

  // See if the current entry is within range, or there is a clone of it
  // in range.
  int result = findInRangeCPEntry(U, UserOffset);
  if (result == 1)
    return false;
  else if (result == 2)
    return true;

  // No existing clone of this CPE is within range.
  // We will be generating a new clone. Get a UID for it.
  unsigned ID = AFI->createPICLabelUId();

  // Look for water where we can place this CPE.
  MachineBasicBlock *NewIsland = MF->CreateMachineBasicBlock();
  MachineBasicBlock *NewMBB;
  water_iterator IP;
  if (findAvailableWater(U, UserOffset, IP, CloserWater)) {
    LLVM_DEBUG(dbgs() << "Found water in range\n");
    MachineBasicBlock *WaterBB = *IP;

    // If the original WaterList entry was "new water" on this iteration,
    // propagate that to the new island. This is just keeping NewWaterList
    // updated to match the WaterList, which will be updated below.
    if (NewWaterList.erase(WaterBB))
      NewWaterList.insert(NewIsland);

    // The new CPE goes before the following block (NewMBB).
    NewMBB = &*++WaterBB->getIterator();
  } else {
    // No water found.
    LLVM_DEBUG(dbgs() << "No water found\n");
    createNewWater(CPUserIndex, UserOffset, NewMBB);

    // splitBlockBeforeInstr adds to WaterList, which is important when it is
    // the new water after the user instruction. We want the new water to be
    // available for the CPE we are about to insert.
    MachineBasicBlock *WaterBB = &*--NewMBB->getIterator();
    IP = find(WaterList, WaterBB);
    if (IP != WaterList.end())
      NewWaterList.erase(WaterBB);

    // We are adding new water. Update NewWaterList.
    NewWaterList.insert(NewIsland);
  }

  // Remove the original WaterList entry; we want subsequent insertions in
  // this vicinity to go after the one we're about to insert. This
  // considerably reduces the number of times we have to move the same CPE
  // more than once and is also important to ensure the algorithm terminates.
  if (IP != WaterList.end())
    WaterList.erase(IP);

  // Okay, we know we can put an island before NewMBB now, do it!
  MF->insert(NewMBB->getIterator(), NewIsland);

  // Update internal data structures to account for the newly inserted MBB.
  updateForInsertedWaterBlock(NewIsland);

  // Now that we have an island to add the CPE to, clone the original CPE and
  // add it to the island.
  U.HighWaterMark = NewIsland;
  U.CPEMI = BuildMI(NewIsland, DebugLoc(), CPEMI->getDesc())
                .addImm(ID)
                .add(CPEMI->getOperand(1))
                .addImm(Size);
  CPEntries[CPI].push_back(CPEntry(U.CPEMI, ID, 1));

  // Decrement the old entry, and remove it if refcount becomes 0.
  decrementCPEReferenceCount(CPI, CPEMI);

  // Mark the basic block as aligned as required by the const-pool entry.
  NewIsland->setAlignment(getCPEAlign(U.CPEMI));

  // Increase the size of the island block to account for the new entry.
  BBUtils->adjustBBSize(NewIsland, Size);
  BBUtils->adjustBBOffsetsAfter(&*--NewIsland->getIterator());

  // Finally, change the CPI in the instruction operand to be ID.
  for (MachineOperand &MO : UserMI->operands())
    if (MO.isCPI()) {
      MO.setIndex(ID);
      break;
    }

  LLVM_DEBUG(
      dbgs() << "  Moved CPE to #" << ID << " CPI=" << CPI
             << format(" offset=%#x\n",
                       BBUtils->getBBInfo()[NewIsland->getNumber()].Offset));

  return true;
}
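/// Remove a dead constant pool entry instruction, updating sizes and offsets
/// of impacted basic blocks.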
void ARMConstantIslands::removeDeadCPEMI(MachineInstr *CPEMI) {
  MachineBasicBlock *CPEBB = CPEMI->getParent();
  unsigned Size = CPEMI->getOperand(2).getImm();
  CPEMI->eraseFromParent();
  BBUtils->adjustBBSize(CPEBB, -Size);
  // All succeeding offsets have the current size value added in, fix this.
  if (CPEBB->empty()) {
    BBUtils->getBBInfo()[CPEBB->getNumber()].Size = 0;

    // This block no longer needs to be aligned.
    CPEBB->setAlignment(Align(1));
  } else {
    // Entries are sorted by descending alignment, so realign from the front.
    CPEBB->setAlignment(getCPEAlign(&*CPEBB->begin()));
  }

  BBUtils->adjustBBOffsetsAfter(CPEBB);
}
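/// Remove constant pool entries whose refcounts are zero.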
bool ARMConstantIslands::removeUnusedCPEntries() {
  bool MadeChange = false;
  for (std::vector<CPEntry> &CPEs : CPEntries) {
    for (CPEntry &CPE : CPEs) {
      if (CPE.RefCount == 0 && CPE.CPEMI) {
        removeDeadCPEMI(CPE.CPEMI);
        CPE.CPEMI = nullptr;
        MadeChange = true;
      }
    }
  }
  return MadeChange;
}
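/// Fix up an immediate branch whose destination is too far away to fit in
/// its displacement field.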
bool ARMConstantIslands::fixupImmediateBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *DestBB = MI->getOperand(0).getMBB();

  // Check to see if the DestBB is already in-range.
  if (BBUtils->isBBInRange(MI, DestBB, Br.MaxDisp))
    return false;

  if (!Br.isCond)
    return fixupUnconditionalBr(Br);
  return fixupConditionalBr(Br);
}
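/// Fix up an unconditional branch whose destination is too far away to fit
/// in its displacement field. If LR has been spilled in the epilogue, we can
/// use BL (tBfar) to implement a far jump; otherwise the function size was
/// underestimated and this is a fatal error.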
bool
ARMConstantIslands::fixupUnconditionalBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *MBB = MI->getParent();
  if (!isThumb1)
    llvm_unreachable("fixupUnconditionalBr is Thumb1 only!");

  if (!AFI->isLRSpilled())
    report_fatal_error("underestimated function size");

  // Use BL to implement far jump.
  Br.MaxDisp = (1 << 21) * 2;
  MI->setDesc(TII->get(ARM::tBfar));
  BBUtils->adjustBBSize(MBB, 2);
  BBUtils->adjustBBOffsetsAfter(MBB);
  ++NumUBrFixed;

  LLVM_DEBUG(dbgs() << "  Changed B to long jump " << *MI);

  return true;
}
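/// Fix up a conditional branch whose destination is too far away to fit in
/// its displacement field. It is converted to an inverted conditional branch
/// plus an unconditional branch to the destination.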
bool
ARMConstantIslands::fixupConditionalBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *DestBB = MI->getOperand(0).getMBB();

  // Add an unconditional branch to the destination and invert the branch
  // condition to jump over it.
  ARMCC::CondCodes CC = (ARMCC::CondCodes)MI->getOperand(1).getImm();
  CC = ARMCC::getOppositeCondition(CC);
  Register CCReg = MI->getOperand(2).getReg();

  // If the branch is at the end of its MBB and that has a fall-through
  // block, direct the updated conditional branch to the fall-through block.
  // Otherwise, split the MBB before the next instruction.
  MachineBasicBlock *MBB = MI->getParent();
  MachineInstr *BMI = &MBB->back();
  bool NeedSplit = (BMI != MI) || !BBHasFallthrough(MBB);

  ++NumCBrFixed;
  if (BMI != MI) {
    if (std::next(MachineBasicBlock::iterator(MI)) == std::prev(MBB->end()) &&
        BMI->getOpcode() == Br.UncondBr) {
      // Last MI in the BB is an unconditional branch. We can simply invert
      // the condition and swap destinations.
      MachineBasicBlock *NewDest = BMI->getOperand(0).getMBB();
      if (BBUtils->isBBInRange(MI, NewDest, Br.MaxDisp)) {
        LLVM_DEBUG(
            dbgs() << "  Invert Bcc condition and swap its destination with "
                   << *BMI);
        BMI->getOperand(0).setMBB(DestBB);
        MI->getOperand(0).setMBB(NewDest);
        MI->getOperand(1).setImm(CC);
        return true;
      }
    }
  }

  if (NeedSplit) {
    splitBlockBeforeInstr(MI);
    // No need for the branch to the next block. We're adding an
    // unconditional branch to the destination.
    int delta = TII->getInstSizeInBytes(MBB->back());
    BBUtils->adjustBBSize(MBB, -delta);
    MBB->back().eraseFromParent();

    // The conditional successor will be swapped between the BBs after this,
    // so update CFG.
    MBB->addSuccessor(DestBB);
    std::next(MBB->getIterator())->removeSuccessor(DestBB);
  }
  MachineBasicBlock *NextBB = &*++MBB->getIterator();

  LLVM_DEBUG(dbgs() << "  Insert B to " << printMBBReference(*DestBB)
                    << " also invert condition and change dest. to "
                    << printMBBReference(*NextBB) << "\n");

  // Insert a new conditional branch and a new unconditional branch.
  BuildMI(MBB, DebugLoc(), TII->get(MI->getOpcode()))
      .addMBB(NextBB)
      .addImm(CC)
      .addReg(CCReg);
  Br.MI = &MBB->back();
  BBUtils->adjustBBSize(MBB, TII->getInstSizeInBytes(MBB->back()));
  if (isThumb)
    BuildMI(MBB, DebugLoc(), TII->get(Br.UncondBr))
        .addMBB(DestBB)
        .add(predOps(ARMCC::AL));
  else
    BuildMI(MBB, DebugLoc(), TII->get(Br.UncondBr)).addMBB(DestBB);
  BBUtils->adjustBBSize(MBB, TII->getInstSizeInBytes(MBB->back()));
  unsigned MaxDisp = getUnconditionalBrDisp(Br.UncondBr);
  ImmBranches.push_back(ImmBranch(&MBB->back(), MaxDisp, false, Br.UncondBr));

  // Remove the old conditional branch. It may or may not still be in MBB.
  BBUtils->adjustBBSize(MI->getParent(), -TII->getInstSizeInBytes(*MI));
  MI->eraseFromParent();
  BBUtils->adjustBBOffsetsAfter(MBB);
  return true;
}
bool ARMConstantIslands::optimizeThumb2Instructions() {
  bool MadeChange = false;

  // Shrink ADR and LDR from constantpool.
  for (CPUser &U : CPUsers) {
    unsigned Opcode = U.MI->getOpcode();
    unsigned NewOpc = 0;
    unsigned Scale = 1;
    unsigned Bits = 0;
    switch (Opcode) {
    default: break;
    case ARM::t2LEApcrel:
      if (isARMLowRegister(U.MI->getOperand(0).getReg())) {
        NewOpc = ARM::tLEApcrel;
        Bits = 8;
        Scale = 4;
      }
      break;
    case ARM::t2LDRpci:
      if (isARMLowRegister(U.MI->getOperand(0).getReg())) {
        NewOpc = ARM::tLDRpci;
        Bits = 8;
        Scale = 4;
      }
      break;
    }

    if (!NewOpc)
      continue;

    unsigned UserOffset = getUserOffset(U);
    unsigned MaxOffs = ((1 << Bits) - 1) * Scale;

    // Be conservative with inline asm.
    if (!U.KnownAlignment)
      MaxOffs -= 2;

    // FIXME: Check if offset is multiple of scale if scale is not 4.
    if (isCPEntryInRange(U.MI, UserOffset, U.CPEMI, MaxOffs, false, true)) {
      LLVM_DEBUG(dbgs() << "Shrink: " << *U.MI);
      U.MI->setDesc(TII->get(NewOpc));
      MachineBasicBlock *MBB = U.MI->getParent();
      BBUtils->adjustBBSize(MBB, -2);
      BBUtils->adjustBBOffsetsAfter(MBB);
      ++NumT2CPShrunk;
      MadeChange = true;
    }
  }

  return MadeChange;
}
bool ARMConstantIslands::optimizeThumb2Branches() {

  auto TryShrinkBranch = [this](ImmBranch &Br) {
    unsigned Opcode = Br.MI->getOpcode();
    unsigned NewOpc = 0;
    unsigned Scale = 1;
    unsigned Bits = 0;
    switch (Opcode) {
    default: break;
    case ARM::t2B:   NewOpc = ARM::tB;   Bits = 11; Scale = 2; break;
    case ARM::t2Bcc: NewOpc = ARM::tBcc; Bits = 8;  Scale = 2; break;
    }
    if (NewOpc) {
      unsigned MaxOffs = ((1 << (Bits-1))-1) * Scale;
      MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
      if (BBUtils->isBBInRange(Br.MI, DestBB, MaxOffs)) {
        LLVM_DEBUG(dbgs() << "Shrink branch: " << *Br.MI);
        Br.MI->setDesc(TII->get(NewOpc));
        MachineBasicBlock *MBB = Br.MI->getParent();
        BBUtils->adjustBBSize(MBB, -2);
        BBUtils->adjustBBOffsetsAfter(MBB);
        ++NumT2BrShrunk;
        return true;
      }
    }
    return false;
  };
  struct ImmCompare {
    MachineInstr* MI = nullptr;
    unsigned NewOpc = 0;
  };

  auto FindCmpForCBZ = [this](ImmBranch &Br, ImmCompare &ImmCmp,
                              MachineBasicBlock *DestBB) {
    ImmCmp.MI = nullptr;
    ImmCmp.NewOpc = 0;

    // If the conditional branch doesn't kill CPSR, then CPSR can be liveout
    // so this transformation is illegal.
    if (!Br.MI->killsRegister(ARM::CPSR, nullptr))
      return false;

    // Is the conditional branch of the right form?
    unsigned NewOpc = 0;
    switch ((ARMCC::CondCodes)Br.MI->getOperand(1).getImm()) {
    default:
      return false;
    case ARMCC::EQ:
      NewOpc = ARM::tCBZ;
      break;
    case ARMCC::NE:
      NewOpc = ARM::tCBNZ;
      break;
    }

    // Check if the distance is within 126. Subtract the starting offset by 2
    // because the cmp will be eliminated.
    unsigned BrOffset = BBUtils->getOffsetOf(Br.MI) + 4 - 2;
    BBInfoVector &BBInfo = BBUtils->getBBInfo();
    unsigned DestOffset = BBInfo[DestBB->getNumber()].Offset;
    if (BrOffset >= DestOffset || (DestOffset - BrOffset) > 126)
      return false;

    // Search backwards to find a tCMPi8 against 0.
    auto *TRI = STI->getRegisterInfo();
    MachineInstr *CmpMI = findCMPToFoldIntoCBZ(Br.MI, TRI);
    if (!CmpMI || CmpMI->getOpcode() != ARM::tCMPi8)
      return false;

    ImmCmp.MI = CmpMI;
    ImmCmp.NewOpc = NewOpc;
    return true;
  };
  auto TryConvertToLE = [this](ImmBranch &Br, ImmCompare &Cmp) {
    if (Br.MI->getOpcode() != ARM::t2Bcc || !STI->hasLOB() ||
        STI->hasMinSize())
      return false;

    MachineBasicBlock *MBB = Br.MI->getParent();
    MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
    if (BBUtils->getOffsetOf(MBB) < BBUtils->getOffsetOf(DestBB) ||
        !BBUtils->isBBInRange(Br.MI, DestBB, 4094))
      return false;

    if (!DT->dominates(DestBB, MBB))
      return false;

    // We queried for the CBN?Z opcode based upon the 'ExitBB', the opposite
    // target of Br. So now we need to reverse the condition.
    Cmp.NewOpc = Cmp.NewOpc == ARM::tCBZ ? ARM::tCBNZ : ARM::tCBZ;

    MachineInstrBuilder MIB = BuildMI(*MBB, Br.MI, Br.MI->getDebugLoc(),
                                      TII->get(ARM::t2LE));
    // Swapped a t2Bcc for a t2LE, so no need to update the size of the block.
    MIB.add(Br.MI->getOperand(0));
    Br.MI->eraseFromParent();
    Br.MI = MIB;
    ++NumLEInserted;
    return true;
  };
  bool MadeChange = false;

  // Visit branches in reverse order so that shrinking later branches brings
  // earlier forward branches closer to their targets.
  for (ImmBranch &Br : reverse(ImmBranches)) {
    MachineBasicBlock *DestBB = Br.MI->getOperand(0).getMBB();
    MachineBasicBlock *MBB = Br.MI->getParent();
    MachineBasicBlock *ExitBB = &MBB->back() == Br.MI ?
        MBB->getFallThrough() :
        MBB->back().getOperand(0).getMBB();

    ImmCompare Cmp;
    if (FindCmpForCBZ(Br, Cmp, ExitBB) && TryConvertToLE(Br, Cmp)) {
      DestBB = ExitBB;
      MadeChange = true;
    } else {
      FindCmpForCBZ(Br, Cmp, DestBB);
      MadeChange |= TryShrinkBranch(Br);
    }

    unsigned Opcode = Br.MI->getOpcode();
    if ((Opcode != ARM::tBcc && Opcode != ARM::t2LE) || !Cmp.NewOpc)
      continue;

    Register Reg = Cmp.MI->getOperand(0).getReg();

    // Check for Kill flags on Reg. If they are present remove them and set
    // kill on Reg at the tCB(N)Z.
    auto *TRI = STI->getRegisterInfo();
    MachineBasicBlock::iterator KillMI = Br.MI;
    bool RegKilled = false;
    do {
      --KillMI;
      if (KillMI->killsRegister(Reg, TRI)) {
        KillMI->clearRegisterKills(Reg, TRI);
        RegKilled = true;
        break;
      }
    } while (KillMI != Cmp.MI);

    // Create the new CB(N)Z.
    MachineInstr *NewBR =
        BuildMI(*MBB, Br.MI, Br.MI->getDebugLoc(), TII->get(Cmp.NewOpc))
            .addReg(Reg, getKillRegState(RegKilled))
            .addMBB(DestBB, Br.MI->getOperand(0).getTargetFlags());

    Cmp.MI->eraseFromParent();

    if (Br.MI->getOpcode() == ARM::tBcc) {
      Br.MI->eraseFromParent();
      Br.MI = NewBR;
      BBUtils->adjustBBSize(MBB, -2);
    } else if (MBB->back().getOpcode() != ARM::t2LE) {
      // The logic has been reversed: the CBN?Z is now the conditional branch
      // and the LE the unconditional one, so the trailing unconditional
      // branch is redundant and can be removed.
      MachineInstr *LastMI = &MBB->back();
      BBUtils->adjustBBSize(MBB, -TII->getInstSizeInBytes(*LastMI));
      LastMI->eraseFromParent();
    }
    BBUtils->adjustBBOffsetsAfter(MBB);
    MadeChange = true;
  }

  return MadeChange;
}
static bool isSimpleIndexCalc(MachineInstr &I, unsigned EntryReg,
                              unsigned BaseReg) {
  if (I.getOpcode() != ARM::t2ADDrs)
    return false;

  if (I.getOperand(0).getReg() != EntryReg)
    return false;

  if (I.getOperand(1).getReg() != BaseReg)
    return false;

  // FIXME: what about CC and IdxReg?
  return true;
}
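/// While trying to form a TB[BH] instruction, we may (if the jump table
/// doesn't immediately follow the BR_JT) need access to the start of the
/// jump table. Check whether the base register can be preserved across the
/// jump and whether the address-forming LEA can be deleted.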
bool ARMConstantIslands::preserveBaseRegister(MachineInstr *JumpMI,
                                              MachineInstr *LEAMI,
                                              unsigned &DeadSize,
                                              bool &CanDeleteLEA,
                                              bool &BaseRegKill) {
  Register BaseReg = LEAMI->getOperand(0).getReg();
  Register EntryReg = JumpMI->getOperand(0).getReg();

  CanDeleteLEA = true;
  BaseRegKill = false;
  MachineInstr *RemovableAdd = nullptr;
  MachineBasicBlock::iterator I(LEAMI);
  for (++I; &*I != JumpMI; ++I) {
    if (isSimpleIndexCalc(*I, EntryReg, BaseReg)) {
      RemovableAdd = &*I;
      break;
    }

    for (const MachineOperand &MO : I->operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;
      if (MO.isDef() && MO.getReg() == BaseReg)
        return false;
      if (MO.isUse() && MO.getReg() == BaseReg) {
        BaseRegKill = BaseRegKill || MO.isKill();
        CanDeleteLEA = false;
      }
    }
  }

  if (!RemovableAdd)
    return true;

  // Check the add really is removable, and that nothing else in the block
  // clobbers BaseReg.
  for (++I; &*I != JumpMI; ++I) {
    for (const MachineOperand &MO : I->operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;
      if (MO.isDef() && MO.getReg() == BaseReg)
        return false;
      if (MO.isUse() && MO.getReg() == EntryReg)
        RemovableAdd = nullptr;
    }
  }

  if (RemovableAdd) {
    RemovableAdd->eraseFromParent();
    DeadSize += isThumb2 ? 4 : 2;
  } else if (BaseReg == EntryReg) {
    // The add wasn't removable, but clobbered the base for the TBB. So we
    // can't preserve it.
    return false;
  }

  return true;
}
static void RemoveDeadAddBetweenLEAAndJT(MachineInstr *LEAMI,
                                         MachineInstr *JumpMI,
                                         unsigned &DeadSize) {
  // Remove a dead add between the LEA and JT, which used to compute
  // EntryReg, but the JT now uses PC. Find the last ADD (if any) that
  // defines EntryReg and is not clobbered or used afterwards.
  MachineInstr *RemovableAdd = nullptr;
  Register EntryReg = JumpMI->getOperand(0).getReg();

  MachineBasicBlock::iterator I(LEAMI);
  for (++I; &*I != JumpMI; ++I) {
    if (I->getOpcode() == ARM::t2ADDrs && I->getOperand(0).getReg() == EntryReg)
      RemovableAdd = &*I;
  }

  if (!RemovableAdd)
    return;

  // Ensure EntryReg is not clobbered or used between the ADD and the jump.
  MachineBasicBlock::iterator J(RemovableAdd);
  for (++J; &*J != JumpMI; ++J) {
    for (const MachineOperand &MO : J->operands()) {
      if (!MO.isReg() || !MO.getReg())
        continue;
      if (MO.isDef() && MO.getReg() == EntryReg)
        return;
      if (MO.isUse() && MO.getReg() == EntryReg)
        return;
    }
  }

  LLVM_DEBUG(dbgs() << "Removing Dead Add: " << *RemovableAdd);
  RemovableAdd->eraseFromParent();
  DeadSize += 4;
}
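/// Use tbb / tbh instructions to generate smaller jump tables when possible.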
bool ARMConstantIslands::optimizeThumb2JumpTables() {
  bool MadeChange = false;

  // FIXME: After the tables are shrunk, can we get rid of some of the
  // constantpool tables?
  MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
  if (!MJTI)
    return false;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  for (MachineInstr *MI : T2JumpTables) {
    unsigned NumOps = MI->getDesc().getNumOperands();
    unsigned JTOpIdx = NumOps - (MI->isPredicable() ? 2 : 1);
    MachineOperand JTOP = MI->getOperand(JTOpIdx);
    unsigned JTI = JTOP.getIndex();
    assert(JTI < JT.size());

    bool ByteOk = true;
    bool HalfWordOk = true;
    unsigned JTOffset = BBUtils->getOffsetOf(MI) + 4;
    const std::vector<MachineBasicBlock*> &JTBBs = JT[JTI].MBBs;
    BBInfoVector &BBInfo = BBUtils->getBBInfo();
    for (MachineBasicBlock *MBB : JTBBs) {
      unsigned DstOffset = BBInfo[MBB->getNumber()].Offset;
      // Negative offset is not ok. FIXME: We should change BB layout to make
      // sure all the branches are forward.
      if (ByteOk && (DstOffset - JTOffset) > ((1<<8)-1)*2)
        ByteOk = false;
      unsigned TBHLimit = ((1<<16)-1)*2;
      if (HalfWordOk && (DstOffset - JTOffset) > TBHLimit)
        HalfWordOk = false;
      if (!ByteOk && !HalfWordOk)
        break;
    }

    if (!ByteOk && !HalfWordOk)
      continue;

    CPUser &User = CPUsers[JumpTableUserIndices[JTI]];
    MachineBasicBlock *MBB = MI->getParent();
    if (!MI->getOperand(0).isKill()) // FIXME: needed now?
      continue;
    unsigned DeadSize = 0;
    bool CanDeleteLEA = false;
    bool BaseRegKill = false;

    unsigned IdxReg = ~0U;
    bool IdxRegKill = true;
    if (isThumb2) {
      IdxReg = MI->getOperand(1).getReg();
      IdxRegKill = MI->getOperand(1).isKill();

      bool PreservedBaseReg =
          preserveBaseRegister(MI, User.MI, DeadSize, CanDeleteLEA,
                               BaseRegKill);
      if (!jumpTableFollowsTB(MI, User.CPEMI) && !PreservedBaseReg)
        continue;
    } else {
      // In Thumb-1 the address is computed by an explicit shift + load
      // sequence:
      //   %idx = tLSLri %idx, 2
      //   %base = tLEApcrelJT
      //   %t = tLDRr %base, %idx
      Register BaseReg = User.MI->getOperand(0).getReg();

      MachineBasicBlock *UserMBB = User.MI->getParent();
      MachineBasicBlock::iterator Shift = User.MI->getIterator();
      if (Shift == UserMBB->begin())
        continue;

      Shift = prev_nodbg(Shift, UserMBB->begin());
      if (Shift->getOpcode() != ARM::tLSLri ||
          Shift->getOperand(3).getImm() != 2 ||
          !Shift->getOperand(2).isKill())
        continue;
      IdxReg = Shift->getOperand(2).getReg();
      Register ShiftedIdxReg = Shift->getOperand(0).getReg();

      // The load must immediately follow the shift.
      MachineInstr *Load = User.MI->getNextNode();
      if (Load->getOpcode() != ARM::tLDRr)
        continue;
      if (Load->getOperand(1).getReg() != BaseReg ||
          Load->getOperand(2).getReg() != ShiftedIdxReg ||
          !Load->getOperand(2).isKill())
        continue;

      // The base register must not be redefined between the load and the
      // branch.
      auto *TRI = STI->getRegisterInfo();
      if (registerDefinedBetween(BaseReg, Load->getNextNode(), MBB->end(),
                                 TRI))
        continue;
      if (isPositionIndependentOrROPI) {
        // In PIC/ROPI mode an extra add folds the table base into the entry.
        MachineInstr *Add = Load->getNextNode();
        if (Add->getOpcode() != ARM::tADDrr ||
            Add->getOperand(2).getReg() != BaseReg ||
            Add->getOperand(3).getReg() != Load->getOperand(0).getReg() ||
            !Add->getOperand(3).isKill())
          continue;
        if (Add->getOperand(0).getReg() != MI->getOperand(0).getReg())
          continue;
        if (registerDefinedBetween(IdxReg, Add->getNextNode(), MBB->end(),
                                   TRI))
          // IdxReg gets redefined in the middle of the sequence.
          continue;
        Add->eraseFromParent();
        DeadSize += 2;
      } else {
        if (Load->getOperand(0).getReg() != MI->getOperand(0).getReg())
          continue;
        if (registerDefinedBetween(IdxReg, Load->getNextNode(), MBB->end(),
                                   TRI))
          // IdxReg gets redefined in the middle of the sequence.
          continue;
      }

      // Now safe to delete the load and lsl. The LEA will be removed later.
      CanDeleteLEA = true;
      Shift->eraseFromParent();
      Load->eraseFromParent();
      DeadSize += 4;
    }
    LLVM_DEBUG(dbgs() << "Shrink JT: " << *MI);
    MachineInstr *CPEMI = User.CPEMI;
    unsigned Opc = ByteOk ? ARM::t2TBB_JT : ARM::t2TBH_JT;
    if (!isThumb2)
      Opc = ByteOk ? ARM::tTBB_JT : ARM::tTBH_JT;

    MachineBasicBlock::iterator MI_JT = MI;
    MachineInstr *NewJTMI =
        BuildMI(*MBB, MI_JT, MI->getDebugLoc(), TII->get(Opc))
            .addReg(User.MI->getOperand(0).getReg(),
                    getKillRegState(BaseRegKill))
            .addReg(IdxReg, getKillRegState(IdxRegKill))
            .addJumpTableIndex(JTI, JTOP.getTargetFlags())
            .addImm(CPEMI->getOperand(0).getImm());

    unsigned JTOpc = ByteOk ? ARM::JUMPTABLE_TBB : ARM::JUMPTABLE_TBH;
    CPEMI->setDesc(TII->get(JTOpc));

    if (jumpTableFollowsTB(MI, User.CPEMI)) {
      NewJTMI->getOperand(0).setReg(ARM::PC);
      NewJTMI->getOperand(0).setIsKill(false);

      if (CanDeleteLEA) {
        if (isThumb2)
          RemoveDeadAddBetweenLEAAndJT(User.MI, MI, DeadSize);

        User.MI->eraseFromParent();
        DeadSize += isThumb2 ? 4 : 2;

        // The LEA was eliminated; the TBB instruction becomes the only new
        // user of the jump table.
        User.MI = NewJTMI;
        User.MaxDisp = 4;
        User.NegOk = false;
        User.IsSoImm = false;
        User.KnownAlignment = false;
      } else {
        // The LEA couldn't be eliminated, so we must add another CPUser to
        // record the TBB or TBH use.
        int CPEntryIdx = JumpTableEntryIndices[JTI];
        auto &CPEs = CPEntries[CPEntryIdx];
        auto Entry =
            find_if(CPEs, [&](CPEntry &E) { return E.CPEMI == User.CPEMI; });
        ++Entry->RefCount;
        CPUsers.emplace_back(CPUser(NewJTMI, User.CPEMI, 4, false, false));
      }
    }

    unsigned NewSize = TII->getInstSizeInBytes(*NewJTMI);
    unsigned OrigSize = TII->getInstSizeInBytes(*MI);
    MI->eraseFromParent();

    int Delta = OrigSize - NewSize + DeadSize;
    BBInfo[MBB->getNumber()].Size -= Delta;
    BBUtils->adjustBBOffsetsAfter(MBB);

    ++NumTBs;
    MadeChange = true;
  }

  return MadeChange;
}
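/// Adjust the function's block layout so that jump table targets come after
/// the jump instruction that uses them, making forward TB[BH] branches
/// possible.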
bool ARMConstantIslands::reorderThumb2JumpTables() {
  bool MadeChange = false;

  MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
  if (!MJTI)
    return false;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  for (MachineInstr *MI : T2JumpTables) {
    unsigned NumOps = MI->getDesc().getNumOperands();
    unsigned JTOpIdx = NumOps - (MI->isPredicable() ? 2 : 1);
    MachineOperand JTOP = MI->getOperand(JTOpIdx);
    unsigned JTI = JTOP.getIndex();
    assert(JTI < JT.size());

    // We prefer if target blocks for the jump table come after the jump
    // instruction so we can use TB[BH]. Loop through the target blocks
    // and try to adjust them such that that's true.
    int JTNumber = MI->getParent()->getNumber();
    const std::vector<MachineBasicBlock*> &JTBBs = JT[JTI].MBBs;
    for (MachineBasicBlock *MBB : JTBBs) {
      int DTNumber = MBB->getNumber();

      if (DTNumber < JTNumber) {
        // The destination precedes the switch. Try to move the block forward
        // so we have a positive offset.
        MachineBasicBlock *NewBB =
            adjustJTTargetBlockForward(JTI, MBB, MI->getParent());
        if (NewBB)
          MJTI->ReplaceMBBInJumpTable(JTI, MBB, NewBB);
        MadeChange = true;
      }
    }
  }

  return MadeChange;
}
MachineBasicBlock *ARMConstantIslands::adjustJTTargetBlockForward(
    unsigned JTI, MachineBasicBlock *BB, MachineBasicBlock *JTBB) {
  // If the destination block is terminated by an unconditional branch, try
  // to move it; otherwise, create a new block following the jump table.
  MachineFunction::iterator BBi = BB->getIterator();
  MachineFunction::iterator OldPrior = std::prev(BBi);
  MachineFunction::iterator OldNext = std::next(BBi);

  MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
  SmallVector<MachineOperand, 4> Cond, CondPrior;
  bool B = TII->analyzeBranch(*BB, TBB, FBB, Cond);

  // If the block ends in an unconditional branch, move it. The prior block
  // has to have an analyzable terminator for us to move this one. Be
  // paranoid and make sure we're not being told to skip the block if it's
  // legal.
  if (!B && Cond.empty() && BB != &MF->front() &&
      !TII->analyzeBranch(*OldPrior, TBB, FBB, CondPrior)) {
    BB->moveAfter(JTBB);
    OldPrior->updateTerminator(BB);
    BB->updateTerminator(OldNext != MF->end() ? &*OldNext : nullptr);
    // Update numbering to account for the block being moved.
    MF->RenumberBlocks();
    DT->updateBlockNumbers();
    ++NumJTMoved;
    return nullptr;
  }

  // Create a new MBB for the code after the jump BB, and add an
  // unconditional branch from it to BB.
  MachineBasicBlock *NewBB =
      MF->CreateMachineBasicBlock(JTBB->getBasicBlock());
  MachineFunction::iterator MBBI = ++JTBB->getIterator();
  MF->insert(MBBI, NewBB);
  BuildMI(NewBB, DebugLoc(), TII->get(isThumb2 ? ARM::t2B : ARM::tB))
      .addMBB(BB)
      .add(predOps(ARMCC::AL));

  // Update internal data structures to account for the newly inserted MBB.
  MF->RenumberBlocks(NewBB);
  DT->updateBlockNumbers();

  // Update the CFG.
  NewBB->addSuccessor(BB);
  JTBB->replaceSuccessor(BB, NewBB);

  ++NumJTInserted;
  return NewBB;
}
/// createARMConstantIslandPass - returns an instance of the constpool
/// island pass.
FunctionPass *llvm::createARMConstantIslandPass() {
  return new ARMConstantIslands();
}