#define DEBUG_TYPE "ppc-instr-info"

#define GET_INSTRMAP_INFO
#define GET_INSTRINFO_CTOR_DTOR
#include "PPCGenInstrInfo.inc"
53 "Number of spillvsrrc spilled to stack as vec");
55 "Number of spillvsrrc spilled to stack as gpr");
56STATISTIC(NumGPRtoVSRSpill,
"Number of gpr spills to spillvsrrc");
58 "Number of ISELs that depend on comparison of constants converted");
60 "Number of compare-immediate instructions fed by constants");
62 "Number of record-form rotates converted to record-form andi");
    cl::desc("Disable analysis for CTR loops"));

    cl::desc("Causes the backend to crash instead of generating a nop VSX copy"),

    cl::desc("Use the old (incorrect) instruction latency calculation"));

    cl::desc("register pressure factor for the transformations."));

    cl::desc("enable register pressure reduce in machine combiner pass."));
void PPCInstrInfo::anchor() {}

                      STI.isPPC64() ? PPC::BLR8 : PPC::BLR),
      Subtarget(STI), RI(STI.getTargetMachine()) {}

      static_cast<const PPCSubtarget *>(STI)->getCPUDirective();

      static_cast<const PPCSubtarget *>(STI)->getInstrItineraryData();
                                         unsigned *PredCost) const {
    return PPCGenInstrInfo::getInstrLatency(ItinData, MI, PredCost);

  unsigned DefClass = MI.getDesc().getSchedClass();
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {

  std::optional<unsigned> Latency = PPCGenInstrInfo::getOperandLatency(

  if (!DefMI.getParent())

  if (Reg.isVirtual()) {
    IsRegCR = MRI->getRegClass(Reg)->hasSuperClassEq(&PPC::CRRCRegClass) ||
              MRI->getRegClass(Reg)->hasSuperClassEq(&PPC::CRBITRCRegClass);
  } else {
    IsRegCR = PPC::CRRCRegClass.contains(Reg) ||
              PPC::CRBITRCRegClass.contains(Reg);
  }

  if (UseMI.isBranch() && IsRegCR) {
#define InfoArrayIdxFMAInst 0
#define InfoArrayIdxFAddInst 1
#define InfoArrayIdxFMULInst 2
#define InfoArrayIdxAddOpIdx 3
#define InfoArrayIdxMULOpIdx 4
#define InfoArrayIdxFSubInst 5
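// Editorial note, inferred from the index macros above and the rows below:
// each row pairs an FMA opcode with the matching add, mul and sub opcodes,
// followed by the operand index of the addend and of the first multiplicand
// for that FMA form (1/2 for the VSX A-form opcodes, 3/1 for FMADD/FMADDS).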
    {PPC::XSMADDADP, PPC::XSADDDP, PPC::XSMULDP, 1, 2, PPC::XSSUBDP},
    {PPC::XSMADDASP, PPC::XSADDSP, PPC::XSMULSP, 1, 2, PPC::XSSUBSP},
    {PPC::XVMADDADP, PPC::XVADDDP, PPC::XVMULDP, 1, 2, PPC::XVSUBDP},
    {PPC::XVMADDASP, PPC::XVADDSP, PPC::XVMULSP, 1, 2, PPC::XVSUBSP},
    {PPC::FMADD, PPC::FADD, PPC::FMUL, 3, 1, PPC::FSUB},
    {PPC::FMADDS, PPC::FADDS, PPC::FMULS, 3, 1, PPC::FSUBS}};

int16_t PPCInstrInfo::getFMAOpIdxInfo(unsigned Opcode) const {
                                         bool DoRegPressureReduce) const {

  auto IsAllOpsVirtualReg = [](const MachineInstr &Instr) {
    for (const auto &MO : Instr.explicit_operands())
      if (!(MO.isReg() && MO.getReg().isVirtual()))

  auto IsReassociableAddOrSub = [&](const MachineInstr &Instr,
    if (Instr.getOpcode() !=
    if (!IsAllOpsVirtualReg(Instr))
        !MRI->hasOneNonDBGUse(Instr.getOperand(0).getReg()))

  auto IsReassociableFMA = [&](const MachineInstr &Instr, int16_t &AddOpIdx,
                               int16_t &MulOpIdx, bool IsLeaf) {
    int16_t Idx = getFMAOpIdxInfo(Instr.getOpcode());
    if (!IsAllOpsVirtualReg(Instr))

  int16_t AddOpIdx = -1;
  int16_t MulOpIdx = -1;

  bool IsUsedOnceL = false;
  bool IsUsedOnceR = false;
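  // The lambda below screens the root FMA as a register-pressure-reduction
  // candidate: only the scalar XSMADDASP/XSMADDADP forms qualify, and the
  // multiplicand operands are looked through single-use copy chains so their
  // defining instructions can be identified (comment added for clarity,
  // based on the checks visible in this fragment).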
  auto IsRPReductionCandidate = [&]() {
    if (Opcode != PPC::XSMADDASP && Opcode != PPC::XSMADDADP)

    if (IsReassociableFMA(Root, AddOpIdx, MulOpIdx, true)) {
      assert((MulOpIdx >= 0) && "mul operand index not right!");
      Register MULRegL = TRI->lookThruSingleUseCopyChain(
      Register MULRegR = TRI->lookThruSingleUseCopyChain(
      if (!MULRegL && !MULRegR)

      if (MULRegL && !MULRegR) {
      } else if (!MULRegL && MULRegR) {

      MULInstrL = MRI->getVRegDef(MULRegL);
      MULInstrR = MRI->getVRegDef(MULRegR);
  if (DoRegPressureReduce && IsRPReductionCandidate()) {
    assert((MULInstrL && MULInstrR) &&
           "wrong register pressure reduction candidate!");

  if (!IsReassociableFMA(Root, AddOpIdx, MulOpIdx, false))

  assert((AddOpIdx >= 0) && "add operand index not right!");

  if (!IsReassociableFMA(*Prev, AddOpIdx, MulOpIdx, false))

  assert((AddOpIdx >= 0) && "add operand index not right!");

  if (IsReassociableFMA(*Leaf, AddOpIdx, MulOpIdx, true)) {

  assert(!InsInstrs.empty() &&
         "Instructions set to be inserted is empty!");
  assert(isa<llvm::ConstantFP>(C) && "not a valid constant!");

  APFloat F1((dyn_cast<ConstantFP>(C))->getValueAPF());

  Constant *NegC = ConstantFP::get(dyn_cast<ConstantFP>(C)->getContext(), F1);

  for (auto *Inst : InsInstrs) {
      assert(Operand.isReg() && "Invalid instruction in InsInstrs!");
      if (Operand.getReg() == PPC::ZERO8) {
        Placeholder = &Operand;

  assert(Placeholder && "Placeholder does not exist!");

      generateLoadForNewConst(ConstPoolIdx, &Root, C->getType(), InsInstrs);

  Placeholder->setReg(LoadNewConst);
  if (!(Subtarget.isPPC64() && Subtarget.hasP9Vector() &&

  auto GetMBBPressure =

      if (MI.isDebugValue() || MI.isDebugLabel())

      RPTracker.recede(RegOpers);

  unsigned VSSRCLimit = TRI->getRegPressureSetLimit(

  return GetMBBPressure(MBB)[PPC::RegisterPressureSets::VSSRC] >

  if (!I->hasOneMemOperand())

  return Op->isLoad() && Op->getPseudoValue() &&
Register PPCInstrInfo::generateLoadForNewConst(
         "Target not supported!\n");

  Register VReg1 = MRI->createVirtualRegister(&PPC::G8RC_and_G8RC_NOX0RegClass);

  BuildMI(*MF, MI->getDebugLoc(), get(PPC::ADDIStocHA8), VReg1)

         "Only float and double are supported!");

    LoadOpcode = PPC::DFLOADf32;
    LoadOpcode = PPC::DFLOADf64;

  assert(I->mayLoad() && "Should be a load instruction.\n");
  for (auto MO : I->uses()) {
    if (Reg == 0 || !Reg.isVirtual())

  return (MCP->getConstants())[MO2.getIndex()].Val.ConstVal;
                                        bool DoRegPressureReduce) const {
                                           DoRegPressureReduce);

    reassociateFMA(Root, Pattern, InsInstrs, DelInstrs, InstrIdxForVirtReg);
                                               DelInstrs, InstrIdxForVirtReg);
void PPCInstrInfo::reassociateFMA(
  MRI.constrainRegClass(RegC, RC);

  int16_t Idx = getFMAOpIdxInfo(FmaOp);
  assert(Idx >= 0 && "Root must be a FMA instruction");

  bool IsILPReassociate =

    Leaf = MRI.getVRegDef(MULReg);
    Leaf = MRI.getVRegDef(MULReg);
  if (IsILPReassociate)

    MRI.constrainRegClass(Reg, RC);
    KillFlag = Operand.isKill();

                             bool &MulOp1KillFlag, bool &MulOp2KillFlag,
                             bool &AddOpKillFlag) {
    GetOperandInfo(Instr.getOperand(FirstMulOpIdx), MulOp1, MulOp1KillFlag);
    GetOperandInfo(Instr.getOperand(FirstMulOpIdx + 1), MulOp2, MulOp2KillFlag);
    GetOperandInfo(Instr.getOperand(AddOpIdx), AddOp, AddOpKillFlag);
  Register RegM11, RegM12, RegX, RegY, RegM21, RegM22, RegM31, RegM32, RegA11,
  bool KillX = false, KillY = false, KillM11 = false, KillM12 = false,
       KillM21 = false, KillM22 = false, KillM31 = false, KillM32 = false,
       KillA11 = false, KillA21 = false, KillB = false;

  GetFMAInstrInfo(Root, RegM31, RegM32, RegB, KillM31, KillM32, KillB);

  if (IsILPReassociate)
    GetFMAInstrInfo(*Prev, RegM21, RegM22, RegA21, KillM21, KillM22, KillA21);

    GetFMAInstrInfo(*Leaf, RegM11, RegM12, RegA11, KillM11, KillM12, KillA11);
    GetOperandInfo(Leaf->getOperand(AddOpIdx), RegX, KillX);

    GetOperandInfo(Leaf->getOperand(1), RegX, KillX);
    GetOperandInfo(Leaf->getOperand(2), RegY, KillY);

    GetOperandInfo(Leaf->getOperand(1), RegX, KillX);
    GetOperandInfo(Leaf->getOperand(2), RegY, KillY);

  InstrIdxForVirtReg.insert(std::make_pair(NewVRA, 0));

  if (IsILPReassociate) {
    NewVRB = MRI.createVirtualRegister(RC);
    InstrIdxForVirtReg.insert(std::make_pair(NewVRB, 1));

    NewVRD = MRI.createVirtualRegister(RC);
    InstrIdxForVirtReg.insert(std::make_pair(NewVRD, 2));

                               Register RegMul2, bool KillRegMul2) {
    MI->getOperand(AddOpIdx).setReg(RegAdd);
    MI->getOperand(AddOpIdx).setIsKill(KillAdd);
    MI->getOperand(FirstMulOpIdx).setReg(RegMul1);
    MI->getOperand(FirstMulOpIdx).setIsKill(KillRegMul1);
    MI->getOperand(FirstMulOpIdx + 1).setReg(RegMul2);
    MI->getOperand(FirstMulOpIdx + 1).setIsKill(KillRegMul2);
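  // The helper above rewrites one FMA in place: it installs a new addend
  // register and a new pair of multiplicand registers together with their
  // kill flags. The code below builds the reassociated FMA chain
  // (MINewA/MINewB, plus MINewD for the ILP variants) and calls the helper
  // where a pattern needs the operands in a different order.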
    AdjustOperandOrder(MINewB, RegX, KillX, RegM21, KillM21, RegM22, KillM22);
    AdjustOperandOrder(MINewA, RegY, KillY, RegM31, KillM31, RegM32, KillM32);

    assert(NewVRD && "new FMA register not created!");

    AdjustOperandOrder(MINewB, RegX, KillX, RegM21, KillM21, RegM22, KillM22);
    AdjustOperandOrder(MINewD, NewVRA, true, RegM31, KillM31, RegM32,

  bool KillVarReg = false;
    KillVarReg = KillM31;
    KillVarReg = KillM32;

  if (!IsILPReassociate) {
         "Insertion instructions set should not be empty!");
  if (IsILPReassociate)
                                         unsigned &SubIdx) const {
  switch (MI.getOpcode()) {
  default:
    return false;
  case PPC::EXTSW_32_64:
    SrcReg = MI.getOperand(1).getReg();
    DstReg = MI.getOperand(0).getReg();
    SubIdx = PPC::sub_32;

                                           int &FrameIndex) const {
    if (MI.getOperand(1).isImm() && !MI.getOperand(1).getImm() &&
        MI.getOperand(2).isFI()) {
      FrameIndex = MI.getOperand(2).getIndex();
      return MI.getOperand(0).getReg();
  switch (MI.getOpcode()) {
  case PPC::ADDIStocHA:
  case PPC::ADDIStocHA8:
  case PPC::ADDItocL8:
  case PPC::LOAD_STACK_GUARD:
  case PPC::PPCLdFixedAddr:
  case PPC::XXLXORspz:
  case PPC::XXLXORdpz:
  case PPC::XXLEQVOnes:
  case PPC::XXSPLTI32DX:
  case PPC::XXSPLTIDP:
  case PPC::V_SETALLONESB:
  case PPC::V_SETALLONESH:
  case PPC::V_SETALLONES:
  case PPC::XXSETACCZ:
  case PPC::XXSETACCZW:
                                          int &FrameIndex) const {
  if (MI.getOperand(1).isImm() && !MI.getOperand(1).getImm() &&
      MI.getOperand(2).isFI()) {
    FrameIndex = MI.getOperand(2).getIndex();
    return MI.getOperand(0).getReg();
                                                   unsigned OpIdx2) const {
  if (MI.getOpcode() != PPC::RLWIMI && MI.getOpcode() != PPC::RLWIMI_rec)

  if (MI.getOperand(3).getImm() != 0)

  assert(((OpIdx1 == 1 && OpIdx2 == 2) || (OpIdx1 == 2 && OpIdx2 == 1)) &&
         "Only the operands 1 and 2 can be swapped in RLWIMI/RLWIMI_rec.");

  unsigned SubReg1 = MI.getOperand(1).getSubReg();
  unsigned SubReg2 = MI.getOperand(2).getSubReg();
  bool Reg1IsKill = MI.getOperand(1).isKill();
  bool Reg2IsKill = MI.getOperand(2).isKill();
  bool ChangeReg0 = false;
         "Expecting a two-address instruction!");
  assert(MI.getOperand(0).getSubReg() == SubReg1 && "Tied subreg mismatch");

  unsigned MB = MI.getOperand(4).getImm();
  unsigned ME = MI.getOperand(5).getImm();

  if (MB == 0 && ME == 31)

    Register Reg0 = ChangeReg0 ? Reg2 : MI.getOperand(0).getReg();
    bool Reg0IsDead = MI.getOperand(0).isDead();
    return BuildMI(MF, MI.getDebugLoc(), MI.getDesc())

    MI.getOperand(0).setReg(Reg2);
    MI.getOperand(0).setSubReg(SubReg2);

  MI.getOperand(2).setReg(Reg1);
  MI.getOperand(1).setReg(Reg2);
  MI.getOperand(2).setSubReg(SubReg1);
  MI.getOperand(1).setSubReg(SubReg2);
  MI.getOperand(2).setIsKill(Reg1IsKill);
  MI.getOperand(1).setIsKill(Reg2IsKill);

  MI.getOperand(4).setImm((ME + 1) & 31);
  MI.getOperand(5).setImm((MB - 1) & 31);
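  // With a zero shift amount, swapping the two source registers of
  // RLWIMI/RLWIMI_rec is compensated by complementing the mask: the new
  // mask-begin becomes ME + 1 and the new mask-end becomes MB - 1 (mod 32),
  // which is what the two setImm calls above implement.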
                                         unsigned &SrcOpIdx1,
                                         unsigned &SrcOpIdx2) const {

  return fixCommutedOpIndices(SrcOpIdx1, SrcOpIdx2, 2, 3);

  default: Opcode = PPC::NOP; break;
                                 bool AllowModify) const {
  bool isPPC64 = Subtarget.isPPC64();

  if (!isUnpredicatedTerminator(*I))

  if (I->getOpcode() == PPC::B &&
      I->eraseFromParent();

  if (I == MBB.end() || !isUnpredicatedTerminator(*I))

  if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
  } else if (LastInst.getOpcode() == PPC::BCC) {
  } else if (LastInst.getOpcode() == PPC::BC) {
  } else if (LastInst.getOpcode() == PPC::BCn) {
  } else if (LastInst.getOpcode() == PPC::BDNZ8 ||
  } else if (LastInst.getOpcode() == PPC::BDZ8 ||

  if (I != MBB.begin() && isUnpredicatedTerminator(*--I))

  if (SecondLastInst.getOpcode() == PPC::BCC &&
  } else if (SecondLastInst.getOpcode() == PPC::BC &&
  } else if (SecondLastInst.getOpcode() == PPC::BCn &&
  } else if ((SecondLastInst.getOpcode() == PPC::BDNZ8 ||
              SecondLastInst.getOpcode() == PPC::BDNZ) &&
  } else if ((SecondLastInst.getOpcode() == PPC::BDZ8 ||
              SecondLastInst.getOpcode() == PPC::BDZ) &&

    I->eraseFromParent();
                                    int *BytesRemoved) const {
  assert(!BytesRemoved && "code size not handled");

  if (I->getOpcode() != PPC::B && I->getOpcode() != PPC::BCC &&
      I->getOpcode() != PPC::BC && I->getOpcode() != PPC::BCn &&
      I->getOpcode() != PPC::BDNZ8 && I->getOpcode() != PPC::BDNZ &&
      I->getOpcode() != PPC::BDZ8 && I->getOpcode() != PPC::BDZ)

  I->eraseFromParent();

  if (I->getOpcode() != PPC::BCC &&
      I->getOpcode() != PPC::BC && I->getOpcode() != PPC::BCn &&
      I->getOpcode() != PPC::BDNZ8 && I->getOpcode() != PPC::BDNZ &&
      I->getOpcode() != PPC::BDZ8 && I->getOpcode() != PPC::BDZ)

  I->eraseFromParent();
                                    int *BytesAdded) const {
  assert(TBB && "insertBranch must not be told to insert a fallthrough");
         "PPC branch conditions have two components!");
  assert(!BytesAdded && "code size not handled");

  bool isPPC64 = Subtarget.isPPC64();

                        (isPPC64 ? PPC::BDNZ8 : PPC::BDNZ) :
                        (isPPC64 ? PPC::BDZ8 : PPC::BDZ))).addMBB(TBB);

                        (isPPC64 ? PPC::BDNZ8 : PPC::BDNZ) :
                        (isPPC64 ? PPC::BDZ8 : PPC::BDZ))).addMBB(TBB);
                                   Register FalseReg, int &CondCycles,
                                   int &TrueCycles, int &FalseCycles) const {
  if (!Subtarget.hasISEL())

  if (Cond.size() != 2)

      RI.getCommonSubClass(MRI.getRegClass(TrueReg), MRI.getRegClass(FalseReg));

  if (!PPC::GPRCRegClass.hasSubClassEq(RC) &&
      !PPC::GPRC_NOR0RegClass.hasSubClassEq(RC) &&
      !PPC::G8RCRegClass.hasSubClassEq(RC) &&
      !PPC::G8RC_NOX0RegClass.hasSubClassEq(RC))
1570 "PPC branch conditions have two components!");
1575 RI.getCommonSubClass(
MRI.getRegClass(TrueReg),
MRI.getRegClass(FalseReg));
1576 assert(RC &&
"TrueReg and FalseReg must have overlapping register classes");
1578 bool Is64Bit = PPC::G8RCRegClass.hasSubClassEq(RC) ||
1579 PPC::G8RC_NOX0RegClass.hasSubClassEq(RC);
1581 PPC::GPRCRegClass.hasSubClassEq(RC) ||
1582 PPC::GPRC_NOR0RegClass.hasSubClassEq(RC)) &&
1583 "isel is for regular integer GPRs only");
1585 unsigned OpCode = Is64Bit ? PPC::ISEL8 : PPC::ISEL;
1588 unsigned SubIdx = 0;
1589 bool SwapOps =
false;
1590 switch (SelectPred) {
1594 SubIdx = PPC::sub_eq; SwapOps =
false;
break;
1598 SubIdx = PPC::sub_eq; SwapOps =
true;
break;
1602 SubIdx = PPC::sub_lt; SwapOps =
false;
break;
1606 SubIdx = PPC::sub_lt; SwapOps =
true;
break;
1610 SubIdx = PPC::sub_gt; SwapOps =
false;
break;
1614 SubIdx = PPC::sub_gt; SwapOps =
true;
break;
1618 SubIdx = PPC::sub_un; SwapOps =
false;
break;
1622 SubIdx = PPC::sub_un; SwapOps =
true;
break;
1627 Register FirstReg = SwapOps ? FalseReg : TrueReg,
1628 SecondReg = SwapOps ? TrueReg : FalseReg;
1633 if (
MRI.getRegClass(FirstReg)->contains(PPC::R0) ||
1634 MRI.getRegClass(FirstReg)->contains(PPC::X0)) {
1636 MRI.getRegClass(FirstReg)->contains(PPC::X0) ?
1637 &PPC::G8RC_NOX0RegClass : &PPC::GPRC_NOR0RegClass;
1639 FirstReg =
MRI.createVirtualRegister(FirstRC);
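    // isel cannot take R0/X0 as its first input (it would read a zero
    // instead of the register), so when the first operand's class contains
    // R0/X0 the value is first moved into a fresh register constrained to
    // the NO_R0/NO_X0 class chosen above.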
  if (CRBit == PPC::CR0LT || CRBit == PPC::CR1LT ||
      CRBit == PPC::CR2LT || CRBit == PPC::CR3LT ||
      CRBit == PPC::CR4LT || CRBit == PPC::CR5LT ||
      CRBit == PPC::CR6LT || CRBit == PPC::CR7LT)

  if (CRBit == PPC::CR0GT || CRBit == PPC::CR1GT ||
      CRBit == PPC::CR2GT || CRBit == PPC::CR3GT ||
      CRBit == PPC::CR4GT || CRBit == PPC::CR5GT ||
      CRBit == PPC::CR6GT || CRBit == PPC::CR7GT)

  if (CRBit == PPC::CR0EQ || CRBit == PPC::CR1EQ ||
      CRBit == PPC::CR2EQ || CRBit == PPC::CR3EQ ||
      CRBit == PPC::CR4EQ || CRBit == PPC::CR5EQ ||
      CRBit == PPC::CR6EQ || CRBit == PPC::CR7EQ)

  if (CRBit == PPC::CR0UN || CRBit == PPC::CR1UN ||
      CRBit == PPC::CR2UN || CRBit == PPC::CR3UN ||
      CRBit == PPC::CR4UN || CRBit == PPC::CR5UN ||
      CRBit == PPC::CR6UN || CRBit == PPC::CR7UN)

  assert(Ret != 4 && "Invalid CR bit register");
                               bool RenamableDest, bool RenamableSrc) const {

  if (PPC::F8RCRegClass.contains(DestReg) &&
      PPC::VSRCRegClass.contains(SrcReg)) {
        TRI->getMatchingSuperReg(DestReg, PPC::sub_64, &PPC::VSRCRegClass);
  } else if (PPC::F8RCRegClass.contains(SrcReg) &&
             PPC::VSRCRegClass.contains(DestReg)) {
        TRI->getMatchingSuperReg(SrcReg, PPC::sub_64, &PPC::VSRCRegClass);

  if (PPC::CRBITRCRegClass.contains(SrcReg) &&
      PPC::GPRCRegClass.contains(DestReg)) {
  } else if (PPC::CRRCRegClass.contains(SrcReg) &&
             (PPC::G8RCRegClass.contains(DestReg) ||
              PPC::GPRCRegClass.contains(DestReg))) {
    bool Is64Bit = PPC::G8RCRegClass.contains(DestReg);
    unsigned MvCode = Is64Bit ? PPC::MFOCRF8 : PPC::MFOCRF;
    unsigned ShCode = Is64Bit ? PPC::RLWINM8 : PPC::RLWINM;
    unsigned CRNum = TRI->getEncodingValue(SrcReg);
  } else if (PPC::G8RCRegClass.contains(SrcReg) &&
             PPC::VSFRCRegClass.contains(DestReg)) {
    assert(Subtarget.hasDirectMove() &&
           "Subtarget doesn't support directmove, don't know how to copy.");
  } else if (PPC::VSFRCRegClass.contains(SrcReg) &&
             PPC::G8RCRegClass.contains(DestReg)) {
    assert(Subtarget.hasDirectMove() &&
           "Subtarget doesn't support directmove, don't know how to copy.");
  } else if (PPC::SPERCRegClass.contains(SrcReg) &&
             PPC::GPRCRegClass.contains(DestReg)) {
  } else if (PPC::GPRCRegClass.contains(SrcReg) &&
             PPC::SPERCRegClass.contains(DestReg)) {
else if (PPC::GPRCRegClass.
contains(SrcReg) &&
1757 PPC::SPERCRegClass.
contains(DestReg)) {
1764 if (PPC::GPRCRegClass.
contains(DestReg, SrcReg))
1766 else if (PPC::G8RCRegClass.
contains(DestReg, SrcReg))
1768 else if (PPC::F4RCRegClass.
contains(DestReg, SrcReg))
1770 else if (PPC::CRRCRegClass.
contains(DestReg, SrcReg))
1772 else if (PPC::VRRCRegClass.
contains(DestReg, SrcReg))
1774 else if (PPC::VSRCRegClass.
contains(DestReg, SrcReg))
1784 else if (PPC::VSFRCRegClass.
contains(DestReg, SrcReg) ||
1785 PPC::VSSRCRegClass.
contains(DestReg, SrcReg))
1786 Opc = (Subtarget.hasP9Vector()) ? PPC::XSCPSGNDP : PPC::XXLORf;
1787 else if (Subtarget.pairedVectorMemops() &&
1788 PPC::VSRpRCRegClass.contains(DestReg, SrcReg)) {
1789 if (SrcReg > PPC::VSRp15)
1790 SrcReg = PPC::V0 + (SrcReg - PPC::VSRp16) * 2;
1792 SrcReg = PPC::VSL0 + (SrcReg - PPC::VSRp0) * 2;
1793 if (DestReg > PPC::VSRp15)
1794 DestReg = PPC::V0 + (DestReg - PPC::VSRp16) * 2;
1796 DestReg = PPC::VSL0 + (DestReg - PPC::VSRp0) * 2;
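    // A VSRp (paired VSX) register covers two consecutive VSX registers:
    // pairs 0-15 map onto VSL0..VSL31 and pairs 16-31 onto the Altivec
    // V0..V31 range, so the arithmetic above rewrites each pair into its
    // first underlying register before the copy is emitted.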
  else if (PPC::CRBITRCRegClass.contains(DestReg, SrcReg))
  else if (PPC::SPERCRegClass.contains(DestReg, SrcReg))
  else if ((PPC::ACCRCRegClass.contains(DestReg) ||
            PPC::UACCRCRegClass.contains(DestReg)) &&
           (PPC::ACCRCRegClass.contains(SrcReg) ||
            PPC::UACCRCRegClass.contains(SrcReg))) {
    bool DestPrimed = PPC::ACCRCRegClass.contains(DestReg);
    bool SrcPrimed = PPC::ACCRCRegClass.contains(SrcReg);
        PPC::VSL0 + (SrcReg - (SrcPrimed ? PPC::ACC0 : PPC::UACC0)) * 4;
        PPC::VSL0 + (DestReg - (DestPrimed ? PPC::ACC0 : PPC::UACC0)) * 4;
    if (SrcPrimed && !KillSrc)
  } else if (PPC::G8pRCRegClass.contains(DestReg) &&
             PPC::G8pRCRegClass.contains(SrcReg)) {
    unsigned DestRegIdx = DestReg - PPC::G8p0;
    MCRegister DestRegSub0 = PPC::X0 + 2 * DestRegIdx;
    MCRegister DestRegSub1 = PPC::X0 + 2 * DestRegIdx + 1;
    unsigned SrcRegIdx = SrcReg - PPC::G8p0;
    MCRegister SrcRegSub0 = PPC::X0 + 2 * SrcRegIdx;
    MCRegister SrcRegSub1 = PPC::X0 + 2 * SrcRegIdx + 1;
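    // A G8p register pair n covers the two GPRs X(2n) and X(2n + 1); the
    // Sub0/Sub1 registers computed above are the halves that get copied
    // individually.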
  if (PPC::GPRCRegClass.hasSubClassEq(RC) ||
      PPC::GPRC_NOR0RegClass.hasSubClassEq(RC)) {
  } else if (PPC::G8RCRegClass.hasSubClassEq(RC) ||
             PPC::G8RC_NOX0RegClass.hasSubClassEq(RC)) {
  } else if (PPC::F8RCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::F4RCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::SPERCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::CRRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::CRBITRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::VRRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::VSRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::VSFRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::VSSRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::SPILLTOVSRRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::ACCRCRegClass.hasSubClassEq(RC)) {
    assert(Subtarget.pairedVectorMemops() &&
           "Register unexpected when paired memops are disabled.");
  } else if (PPC::UACCRCRegClass.hasSubClassEq(RC)) {
    assert(Subtarget.pairedVectorMemops() &&
           "Register unexpected when paired memops are disabled.");
  } else if (PPC::WACCRCRegClass.hasSubClassEq(RC)) {
    assert(Subtarget.pairedVectorMemops() &&
           "Register unexpected when paired memops are disabled.");
  } else if (PPC::VSRpRCRegClass.hasSubClassEq(RC)) {
    assert(Subtarget.pairedVectorMemops() &&
           "Register unexpected when paired memops are disabled.");
  } else if (PPC::G8pRCRegClass.hasSubClassEq(RC)) {

  return OpcodesForSpill[getSpillIndex(RC)];

  return OpcodesForSpill[getSpillIndex(RC)];
void PPCInstrInfo::StoreRegToStackSlot(
  if (PPC::CRRCRegClass.hasSubClassEq(RC) ||
      PPC::CRBITRCRegClass.hasSubClassEq(RC))

  StoreRegToStackSlot(MF, SrcReg, isKill, FrameIdx, RC, NewMIs);

  NewMIs.back()->addMemOperand(MF, MMO);

                                        unsigned DestReg, int FrameIdx,

  LoadRegFromStackSlot(MF, DL, DestReg, FrameIdx, RC, NewMIs);

  NewMIs.back()->addMemOperand(MF, MMO);

  assert(Cond.size() == 2 && "Invalid PPC branch opcode!");
  Cond[0].setImm(Cond[0].getImm() == 0 ? 1 : 0);
  unsigned DefOpc = DefMI.getOpcode();
  if (DefOpc != PPC::LI && DefOpc != PPC::LI8)
  if (!DefMI.getOperand(1).isImm())
  if (DefMI.getOperand(1).getImm() != 0)

  for (UseIdx = 0; UseIdx < UseMI.getNumOperands(); ++UseIdx)
    if (UseMI.getOperand(UseIdx).isReg() &&

  assert(UseIdx < UseMI.getNumOperands() && "Cannot find Reg in UseMI");

  if (UseInfo->RegClass != PPC::GPRC_NOR0RegClassID &&
      UseInfo->RegClass != PPC::G8RC_NOX0RegClassID)

    bool isPPC64 = Subtarget.isPPC64();
    ZeroReg = isPPC64 ? PPC::ZERO8 : PPC::ZERO;

    ZeroReg = UseInfo->RegClass == PPC::G8RC_NOX0RegClassID ?
              PPC::ZERO8 : PPC::ZERO;
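  // An LI/LI8 of 0 can be folded into a use that accepts the architected
  // zero register: the use operand is redirected to ZERO/ZERO8 below, and
  // the defining LI is erased once no non-debug uses remain.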
  UseMI.getOperand(UseIdx).setReg(ZeroReg);

  if (MRI->use_nodbg_empty(Reg))
    DefMI.eraseFromParent();
  if (MI.definesRegister(PPC::CTR, nullptr) ||
      MI.definesRegister(PPC::CTR8, nullptr))

                                 unsigned NumT, unsigned ExtraT,
                                 unsigned NumF, unsigned ExtraF,

  switch (MI.getOpcode()) {
  unsigned OpC = MI.getOpcode();
  if (OpC == PPC::BLR || OpC == PPC::BLR8) {
    if (Pred[1].getReg() == PPC::CTR8 || Pred[1].getReg() == PPC::CTR) {
      bool isPPC64 = Subtarget.isPPC64();
      MI.setDesc(get(Pred[0].getImm() ? (isPPC64 ? PPC::BDNZLR8 : PPC::BDNZLR)
                                      : (isPPC64 ? PPC::BDZLR8 : PPC::BDZLR)));

      MI.setDesc(get(PPC::BCLR));

      MI.setDesc(get(PPC::BCLRn));

      MI.setDesc(get(PPC::BCCLR));
          .addImm(Pred[0].getImm())
  } else if (OpC == PPC::B) {
    if (Pred[1].getReg() == PPC::CTR8 || Pred[1].getReg() == PPC::CTR) {
      bool isPPC64 = Subtarget.isPPC64();
      MI.setDesc(get(Pred[0].getImm() ? (isPPC64 ? PPC::BDNZ8 : PPC::BDNZ)
                                      : (isPPC64 ? PPC::BDZ8 : PPC::BDZ)));

      MI.removeOperand(0);
      MI.setDesc(get(PPC::BC));

      MI.removeOperand(0);
      MI.setDesc(get(PPC::BCn));

      MI.removeOperand(0);
      MI.setDesc(get(PPC::BCC));
          .addImm(Pred[0].getImm())
  } else if (OpC == PPC::BCTR || OpC == PPC::BCTR8 || OpC == PPC::BCTRL ||
             OpC == PPC::BCTRL8 || OpC == PPC::BCTRL_RM ||
             OpC == PPC::BCTRL8_RM) {
    if (Pred[1].getReg() == PPC::CTR8 || Pred[1].getReg() == PPC::CTR)

    bool setLR = OpC == PPC::BCTRL || OpC == PPC::BCTRL8 ||
                 OpC == PPC::BCTRL_RM || OpC == PPC::BCTRL8_RM;
    bool isPPC64 = Subtarget.isPPC64();

      MI.setDesc(get(isPPC64 ? (setLR ? PPC::BCCTRL8 : PPC::BCCTR8)
                             : (setLR ? PPC::BCCTRL : PPC::BCCTR)));

      MI.setDesc(get(isPPC64 ? (setLR ? PPC::BCCTRL8n : PPC::BCCTR8n)
                             : (setLR ? PPC::BCCTRLn : PPC::BCCTRn)));

      MI.setDesc(get(isPPC64 ? (setLR ? PPC::BCCCTRL8 : PPC::BCCCTR8)
                             : (setLR ? PPC::BCCCTRL : PPC::BCCCTR)));
          .addImm(Pred[0].getImm())

    if (OpC == PPC::BCTRL_RM || OpC == PPC::BCTRL8_RM)
  assert(Pred1.size() == 2 && "Invalid PPC first predicate");
  assert(Pred2.size() == 2 && "Invalid PPC second predicate");

  if (Pred1[1].getReg() == PPC::CTR8 || Pred1[1].getReg() == PPC::CTR)
  if (Pred2[1].getReg() == PPC::CTR8 || Pred2[1].getReg() == PPC::CTR)
                                     std::vector<MachineOperand> &Pred,
                                     bool SkipDead) const {
      {&PPC::CRRCRegClass, &PPC::CRBITRCRegClass,
       &PPC::CTRRCRegClass, &PPC::CTRRC8RegClass};

  for (unsigned c = 0; c < std::size(RCs) && !Found; ++c) {
      if (MO.isDef() && RC->contains(MO.getReg())) {
      } else if (MO.isRegMask()) {
          if (MO.clobbersPhysReg(R)) {
                                int64_t &Value) const {
  unsigned Opc = MI.getOpcode();

  default:
    return false;

    SrcReg = MI.getOperand(1).getReg();
    Value = MI.getOperand(2).getImm();

    SrcReg = MI.getOperand(1).getReg();
    SrcReg2 = MI.getOperand(2).getReg();
  if (OpC == PPC::FCMPUS || OpC == PPC::FCMPUD)

  bool isPPC64 = Subtarget.isPPC64();
  bool is32BitSignedCompare = OpC == PPC::CMPWI || OpC == PPC::CMPW;
  bool is32BitUnsignedCompare = OpC == PPC::CMPLWI || OpC == PPC::CMPLW;
  bool is64BitUnsignedCompare = OpC == PPC::CMPLDI || OpC == PPC::CMPLD;

  if (!MI)
    return false;

  bool equalityOnly = false;

  if (is32BitSignedCompare) {
  } else if (is32BitUnsignedCompare) {
    equalityOnly = true;
    equalityOnly = is64BitUnsignedCompare;
    equalityOnly = is32BitUnsignedCompare;

       I = MRI->use_instr_begin(CRReg), IE = MRI->use_instr_end();

      if (SubIdx != PPC::sub_eq)

      bool FoundUse = false;
           J = MRI->use_instr_begin(CRReg), JE = MRI->use_instr_end();

  else if (Value != 0) {

  if (equalityOnly || !MRI->hasOneUse(CRReg))

  int16_t Immed = (int16_t)Value;

  for (; I != E && !noSub; --I) {
    unsigned IOpC = Instr.getOpcode();

    if (&*I != &CmpInstr && (Instr.modifiesRegister(PPC::CR0, TRI) ||
                             Instr.readsRegister(PPC::CR0, TRI)))

    if ((OpC == PPC::CMPW || OpC == PPC::CMPLW ||
         OpC == PPC::CMPD || OpC == PPC::CMPLD) &&
        (IOpC == PPC::SUBF || IOpC == PPC::SUBF8) &&
        ((Instr.getOperand(1).getReg() == SrcReg &&
          Instr.getOperand(2).getReg() == SrcReg2) ||
         (Instr.getOperand(1).getReg() == SrcReg2 &&
          Instr.getOperand(2).getReg() == SrcReg))) {

  int MIOpC = MI->getOpcode();
  if (MIOpC == PPC::ANDI_rec || MIOpC == PPC::ANDI8_rec ||
      MIOpC == PPC::ANDIS_rec || MIOpC == PPC::ANDIS8_rec)

    NewOpC = PPC::getRecordFormOpcode(MIOpC);

  if (!equalityOnly && (NewOpC == PPC::SUBF_rec || NewOpC == PPC::SUBF8_rec) &&

  bool ShouldSwap = false;
  if (Sub && Value == 0) {
      ShouldSwap = !ShouldSwap;

       I = MRI->use_instr_begin(CRReg), IE = MRI->use_instr_end();
             "Invalid predicate for equality-only optimization");
      assert((!equalityOnly || NewSubReg == PPC::sub_eq) &&
             "Invalid CR bit for equality-only optimization");

      if (NewSubReg == PPC::sub_lt)
        NewSubReg = PPC::sub_gt;
      else if (NewSubReg == PPC::sub_gt)
        NewSubReg = PPC::sub_lt;
2658 "Non-zero immediate support and ShouldSwap"
2659 "may conflict in updating predicate");
    BuildMI(*MI->getParent(), std::next(MII), MI->getDebugLoc(),
            get(TargetOpcode::COPY), CRReg)

  MI->clearRegisterDeads(PPC::CR0);

  if (MIOpC != NewOpC) {

    if (MIOpC == PPC::RLWINM || MIOpC == PPC::RLWINM8) {
      Register GPRRes = MI->getOperand(0).getReg();
      int64_t SH = MI->getOperand(2).getImm();
      int64_t MB = MI->getOperand(3).getImm();
      int64_t ME = MI->getOperand(4).getImm();

      bool MBInLoHWord = MB >= 16;
      bool MEInLoHWord = ME >= 16;
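      // When the rotate amount is zero and the MB..ME mask fits in a single
      // halfword, the record-form rotate can be re-encoded as a record-form
      // ANDI/ANDIS. The mask below is
      // ((1 << (32 - MB)) - 1) & ~((1 << (31 - ME)) - 1), i.e. the run of
      // ones from bit MB through bit ME in big-endian bit numbering, shifted
      // down by 16 for the ANDIS form when it lives in the high halfword.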
      if (MB <= ME && MBInLoHWord == MEInLoHWord && SH == 0) {
        Mask = ((1LLU << (32 - MB)) - 1) & ~((1LLU << (31 - ME)) - 1);
        Mask >>= MBInLoHWord ? 0 : 16;
        NewOpC = MIOpC == PPC::RLWINM
                     ? (MBInLoHWord ? PPC::ANDI_rec : PPC::ANDIS_rec)
                     : (MBInLoHWord ? PPC::ANDI8_rec : PPC::ANDIS8_rec);
      } else if (MRI->use_empty(GPRRes) && (ME == 31) &&
                 (ME - MB + 1 == SH) && (MB >= 16)) {
        Mask = ((1LLU << 32) - 1) & ~((1LLU << (32 - SH)) - 1);
        NewOpC = MIOpC == PPC::RLWINM ? PPC::ANDIS_rec : PPC::ANDIS8_rec;

      if (Mask != ~0LLU) {
        MI->removeOperand(4);
        MI->removeOperand(3);
        MI->getOperand(2).setImm(Mask);
        NumRcRotatesConvertedToRcAnd++;
    } else if (MIOpC == PPC::RLDICL && MI->getOperand(2).getImm() == 0) {
      int64_t MB = MI->getOperand(3).getImm();
      uint64_t Mask = (1LLU << (63 - MB + 1)) - 1;
      NewOpC = PPC::ANDI8_rec;
      MI->removeOperand(3);
      MI->getOperand(2).setImm(Mask);
      NumRcRotatesConvertedToRcAnd++;

    MI->setDesc(NewDesc);

  if (!MI->definesRegister(ImpDef, nullptr)) {
    MI->addOperand(*MI->getParent()->getParent(),
  if (!MI->readsRegister(ImpUse, nullptr)) {
    MI->addOperand(*MI->getParent()->getParent(),

  assert(MI->definesRegister(PPC::CR0, nullptr) &&
         "Record-form instruction does not define cr0?");

  for (unsigned i = 0, e = PredsToUpdate.size(); i < e; i++)
    PredsToUpdate[i].first->setImm(PredsToUpdate[i].second);

  for (unsigned i = 0, e = SubRegsToUpdate.size(); i < e; i++)
    SubRegsToUpdate[i].first->setSubReg(SubRegsToUpdate[i].second);
  int64_t CmpMask, CmpValue;
  if (CmpValue || !CmpMask || SrcReg2)

  if (Opc == PPC::CMPLWI || Opc == PPC::CMPLDI)

  if (Subtarget.isPPC64() && Opc == PPC::CMPWI)

  bool SrcRegHasOtherUse = false;
  if (CRReg != PPC::CR0)

  bool SeenUseOfCRReg = false;
  bool IsCRRegKilled = false;
  if (!isRegElgibleForForwarding(RegMO, *SrcMI, CmpMI, false, IsCRRegKilled,

  int NewOpC = PPC::getRecordFormOpcode(SrcMIOpc);
         "Record-form instruction does not define cr0?");
  OffsetIsScalable = false;

  case PPC::DFSTOREf64:
    return FirstOpc == SecondOpc;

    return SecondOpc == PPC::STW || SecondOpc == PPC::STW8;

                                  int64_t OpOffset2, bool OffsetIsScalable2,
                                  unsigned ClusterSize,
                                  unsigned NumBytes) const {
         "Only base registers and frame indices are supported.");

  if (ClusterSize > 2)

  unsigned FirstOpc = FirstLdSt.getOpcode();
  unsigned SecondOpc = SecondLdSt.getOpcode();

  int64_t Offset1 = 0, Offset2 = 0;
  assert(Base1 == &BaseOp1 && Base2 == &BaseOp2 &&
         "getMemOperandWithOffsetWidth return incorrect base op");
  assert(Offset1 <= Offset2 && "Caller should have ordered offsets.");
  return Offset1 + (int64_t)Width1.getValue() == Offset2;
  unsigned Opcode = MI.getOpcode();

  if (Opcode == PPC::INLINEASM || Opcode == PPC::INLINEASM_BR) {
    const char *AsmStr = MI.getOperand(0).getSymbolName();
  } else if (Opcode == TargetOpcode::STACKMAP) {
  } else if (Opcode == TargetOpcode::PATCHPOINT) {

  return get(Opcode).getSize();
std::pair<unsigned, unsigned>
  return std::make_pair(TF, 0u);

  using namespace PPCII;
  static const std::pair<unsigned, const char *> TargetFlags[] = {
      {MO_PLT, "ppc-plt"},
      {MO_PIC_FLAG, "ppc-pic"},
      {MO_PCREL_FLAG, "ppc-pcrel"},
      {MO_GOT_FLAG, "ppc-got"},
      {MO_PCREL_OPT_FLAG, "ppc-opt-pcrel"},
      {MO_TLSGD_FLAG, "ppc-tlsgd"},
      {MO_TPREL_FLAG, "ppc-tprel"},
      {MO_TLSLDM_FLAG, "ppc-tlsldm"},
      {MO_TLSLD_FLAG, "ppc-tlsld"},
      {MO_TLSGDM_FLAG, "ppc-tlsgdm"},
      {MO_GOT_TLSGD_PCREL_FLAG, "ppc-got-tlsgd-pcrel"},
      {MO_GOT_TLSLD_PCREL_FLAG, "ppc-got-tlsld-pcrel"},
      {MO_GOT_TPREL_PCREL_FLAG, "ppc-got-tprel-pcrel"},
      {MO_TPREL_LO, "ppc-tprel-lo"},
      {MO_TPREL_HA, "ppc-tprel-ha"},
      {MO_DTPREL_LO, "ppc-dtprel-lo"},
      {MO_TLSLD_LO, "ppc-tlsld-lo"},
      {MO_TOC_LO, "ppc-toc-lo"},
      {MO_TLS, "ppc-tls"},
      {MO_PIC_HA_FLAG, "ppc-ha-pic"},
      {MO_PIC_LO_FLAG, "ppc-lo-pic"},
      {MO_TPREL_PCREL_FLAG, "ppc-tprel-pcrel"},
      {MO_TLS_PCREL_FLAG, "ppc-tls-pcrel"},
      {MO_GOT_PCREL_FLAG, "ppc-got-pcrel"},
  unsigned UpperOpcode, LowerOpcode;
  switch (MI.getOpcode()) {
  case PPC::DFLOADf32:
    UpperOpcode = PPC::LXSSP;
    LowerOpcode = PPC::LFS;
  case PPC::DFLOADf64:
    UpperOpcode = PPC::LXSD;
    LowerOpcode = PPC::LFD;
  case PPC::DFSTOREf32:
    UpperOpcode = PPC::STXSSP;
    LowerOpcode = PPC::STFS;
  case PPC::DFSTOREf64:
    UpperOpcode = PPC::STXSD;
    LowerOpcode = PPC::STFD;
  case PPC::XFLOADf32:
    UpperOpcode = PPC::LXSSPX;
    LowerOpcode = PPC::LFSX;
  case PPC::XFLOADf64:
    UpperOpcode = PPC::LXSDX;
    LowerOpcode = PPC::LFDX;
  case PPC::XFSTOREf32:
    UpperOpcode = PPC::STXSSPX;
    LowerOpcode = PPC::STFSX;
  case PPC::XFSTOREf64:
    UpperOpcode = PPC::STXSDX;
    LowerOpcode = PPC::STFDX;

    UpperOpcode = PPC::LXSIWAX;
    LowerOpcode = PPC::LFIWAX;

    UpperOpcode = PPC::LXSIWZX;
    LowerOpcode = PPC::LFIWZX;

    UpperOpcode = PPC::STXSIWX;
    LowerOpcode = PPC::STFIWX;

  Register TargetReg = MI.getOperand(0).getReg();

  if ((TargetReg >= PPC::F0 && TargetReg <= PPC::F31) ||
      (TargetReg >= PPC::VSL0 && TargetReg <= PPC::VSL31))
    Opcode = LowerOpcode;
  else
    Opcode = UpperOpcode;
  MI.setDesc(get(Opcode));
  auto &MBB = *MI.getParent();
  auto DL = MI.getDebugLoc();

  switch (MI.getOpcode()) {
  case PPC::BUILD_UACC: {
    if (ACC - PPC::ACC0 != UACC - PPC::UACC0) {
      MCRegister SrcVSR = PPC::VSL0 + (UACC - PPC::UACC0) * 4;
      MCRegister DstVSR = PPC::VSL0 + (ACC - PPC::ACC0) * 4;
      for (int VecNo = 0; VecNo < 4; VecNo++)
            .addReg(SrcVSR + VecNo)

  case PPC::KILL_PAIR: {
    MI.setDesc(get(PPC::UNENCODED_NOP));
    MI.removeOperand(1);
    MI.removeOperand(0);
  case TargetOpcode::LOAD_STACK_GUARD: {
           (Subtarget.isTargetLinux() || M->getStackProtectorGuard() == "tls") &&
           "Only Linux target or tls mode are expected to contain "
           "LOAD_STACK_GUARD");
    if (M->getStackProtectorGuard() == "tls")
      Offset = M->getStackProtectorGuardOffset();
    const unsigned Reg = Subtarget.isPPC64() ? PPC::X13 : PPC::R2;
    MI.setDesc(get(Subtarget.isPPC64() ? PPC::LD : PPC::LWZ));

  case PPC::PPCLdFixedAddr: {
           "Only targets with Glibc expected to contain PPCLdFixedAddr");
    const unsigned Reg = Subtarget.isPPC64() ? PPC::X13 : PPC::R2;
    MI.setDesc(get(PPC::LWZ));
#undef PPC_LNX_FEATURE
#define PPC_LNX_DEFINE_OFFSETS
#include "llvm/TargetParser/PPCTargetParser.def"
    bool Is64 = Subtarget.isPPC64();
    if (FAType == PPC_FAWORD_HWCAP) {
        Offset = Is64 ? PPC_HWCAP_OFFSET_LE64 : PPC_HWCAP_OFFSET_LE32;
        Offset = Is64 ? PPC_HWCAP_OFFSET_BE64 : PPC_HWCAP_OFFSET_BE32;
    } else if (FAType == PPC_FAWORD_HWCAP2) {
        Offset = Is64 ? PPC_HWCAP2_OFFSET_LE64 : PPC_HWCAP2_OFFSET_LE32;
        Offset = Is64 ? PPC_HWCAP2_OFFSET_BE64 : PPC_HWCAP2_OFFSET_BE32;
    } else if (FAType == PPC_FAWORD_CPUID) {
        Offset = Is64 ? PPC_CPUID_OFFSET_LE64 : PPC_CPUID_OFFSET_LE32;
        Offset = Is64 ? PPC_CPUID_OFFSET_BE64 : PPC_CPUID_OFFSET_BE32;
    assert(Offset && "Do not know the offset for this fixed addr load");
    MI.removeOperand(1);

#define PPC_TGT_PARSER_UNDEF_MACROS
#include "llvm/TargetParser/PPCTargetParser.def"
#undef PPC_TGT_PARSER_UNDEF_MACROS
  case PPC::DFLOADf32:
  case PPC::DFLOADf64:
  case PPC::DFSTOREf32:
  case PPC::DFSTOREf64: {
    assert(Subtarget.hasP9Vector() &&
           "Invalid D-Form Pseudo-ops on Pre-P9 target.");
           "D-form op must have register and immediate operands");

  case PPC::XFLOADf32:
  case PPC::XFSTOREf32:
    assert(Subtarget.hasP8Vector() &&
           "Invalid X-Form Pseudo-ops on Pre-P8 target.");
    assert(MI.getOperand(2).isReg() && MI.getOperand(1).isReg() &&
           "X-form op must have register and register operands");

  case PPC::XFLOADf64:
  case PPC::XFSTOREf64: {
    assert(Subtarget.hasVSX() &&
           "Invalid X-Form Pseudo-ops on target that has no VSX.");
    assert(MI.getOperand(2).isReg() && MI.getOperand(1).isReg() &&
           "X-form op must have register and register operands");

  case PPC::SPILLTOVSR_LD: {
    Register TargetReg = MI.getOperand(0).getReg();
    if (PPC::VSFRCRegClass.contains(TargetReg)) {
      MI.setDesc(get(PPC::DFLOADf64));
      MI.setDesc(get(PPC::LD));

  case PPC::SPILLTOVSR_ST: {
    if (PPC::VSFRCRegClass.contains(SrcReg)) {
      NumStoreSPILLVSRRCAsVec++;
      MI.setDesc(get(PPC::DFSTOREf64));
      NumStoreSPILLVSRRCAsGpr++;
      MI.setDesc(get(PPC::STD));

  case PPC::SPILLTOVSR_LDX: {
    Register TargetReg = MI.getOperand(0).getReg();
    if (PPC::VSFRCRegClass.contains(TargetReg))
      MI.setDesc(get(PPC::LXSDX));
      MI.setDesc(get(PPC::LDX));

  case PPC::SPILLTOVSR_STX: {
    if (PPC::VSFRCRegClass.contains(SrcReg)) {
      NumStoreSPILLVSRRCAsVec++;
      MI.setDesc(get(PPC::STXSDX));
      NumStoreSPILLVSRRCAsGpr++;
      MI.setDesc(get(PPC::STDX));

  case PPC::CFENCE8: {
    auto Val = MI.getOperand(0).getReg();
    unsigned CmpOp = Subtarget.isPPC64() ? PPC::CMPD : PPC::CMPW;
    MI.setDesc(get(PPC::ISYNC));
    MI.removeOperand(0);
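// selectReg folds an ISEL whose condition is a compare of two known
// constants: given the two immediates, the compare opcode and the CR
// subregister being tested, it returns whichever of TrueReg/FalseReg the
// ISEL would have picked, or NoRegister when the outcome cannot be decided.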
static unsigned selectReg(int64_t Imm1, int64_t Imm2, unsigned CompareOpc,
                          unsigned TrueReg, unsigned FalseReg,
                          unsigned CRSubReg) {
  if (CompareOpc == PPC::CMPWI || CompareOpc == PPC::CMPDI) {
      return Imm1 < Imm2 ? TrueReg : FalseReg;
      return Imm1 > Imm2 ? TrueReg : FalseReg;
      return Imm1 == Imm2 ? TrueReg : FalseReg;
  } else if (CompareOpc == PPC::CMPLWI || CompareOpc == PPC::CMPLDI) {
      return Imm1 == Imm2 ? TrueReg : FalseReg;

  return PPC::NoRegister;
                                               int64_t Imm) const {
  assert(MI.getOperand(OpNo).isReg() && "Operand must be a REG");

  Register InUseReg = MI.getOperand(OpNo).getReg();
  MI.getOperand(OpNo).ChangeToImmediate(Imm);

  int UseOpIdx = MI.findRegisterUseOperandIdx(InUseReg, TRI, false);
  if (UseOpIdx >= 0) {
    MI.removeOperand(UseOpIdx);

  int OperandToKeep = LII.SetCR ? 1 : 0;
  for (int i = MI.getNumOperands() - 1; i > OperandToKeep; i--)
    MI.removeOperand(i);

    MI.setDesc(get(LII.Is64Bit ? PPC::ANDI8_rec : PPC::ANDI_rec));
                                   bool &SeenIntermediateUse) const {
  assert(!MI.getParent()->getParent()->getRegInfo().isSSA() &&
         "Should be called after register allocation.");

  SeenIntermediateUse = false;
  for (; It != E; ++It) {
    if (It->modifiesRegister(Reg, TRI))
    if (It->readsRegister(Reg, TRI))
      SeenIntermediateUse = true;
                                          int64_t Imm) const {
         "Register should be in non-SSA form after RA");
  bool isPPC64 = Subtarget.isPPC64();

  if (isInt<16>(Imm)) {
  } else if (isInt<32>(Imm)) {
    assert(isPPC64 && "Materializing 64-bit immediate to single register is "
                      "only supported in PPC64");
    if ((Imm >> 32) & 0xFFFF)
        .addImm((Imm >> 32) & 0xFFFF);
        .addImm((Imm >> 16) & 0xFFFF);
                                             unsigned &OpNoForForwarding,
                                             bool &SeenIntermediateUse) const {
  OpNoForForwarding = ~0U;

  for (int i = 1, e = MI.getNumOperands(); i < e; i++) {
    if (!MI.getOperand(i).isReg())
    if (!Reg.isVirtual())
    if (DefMIForTrueReg->getOpcode() == PPC::LI ||
        DefMIForTrueReg->getOpcode() == PPC::LI8 ||
        DefMIForTrueReg->getOpcode() == PPC::ADDI ||
        DefMIForTrueReg->getOpcode() == PPC::ADDI8) {
      OpNoForForwarding = i;
      DefMI = DefMIForTrueReg;
    unsigned Opc = MI.getOpcode();
    bool ConvertibleImmForm =
        Opc == PPC::CMPWI || Opc == PPC::CMPLWI || Opc == PPC::CMPDI ||
        Opc == PPC::CMPLDI || Opc == PPC::ADDI || Opc == PPC::ADDI8 ||
        Opc == PPC::ORI || Opc == PPC::ORI8 || Opc == PPC::XORI ||
        Opc == PPC::XORI8 || Opc == PPC::RLDICL || Opc == PPC::RLDICL_rec ||
        Opc == PPC::RLDICL_32 || Opc == PPC::RLDICL_32_64 ||
        Opc == PPC::RLWINM || Opc == PPC::RLWINM_rec || Opc == PPC::RLWINM8 ||
        Opc == PPC::RLWINM8_rec;
    bool IsVFReg = (MI.getNumOperands() && MI.getOperand(0).isReg())

    if ((Opc == PPC::OR || Opc == PPC::OR8) &&
        MI.getOperand(1).getReg() == MI.getOperand(2).getReg())

    for (int i = 1, e = MI.getNumOperands(); i < e; i++) {
      SeenIntermediateUse = false;

      case PPC::ADDItocL8:
        OpNoForForwarding = i;

  return OpNoForForwarding == ~0U ? nullptr : DefMI;
unsigned PPCInstrInfo::getSpillTarget() const {
  bool IsP10Variant = Subtarget.isISA3_1() || Subtarget.pairedVectorMemops();
  return Subtarget.isISAFuture() ? 3 : IsP10Variant ?
                                   2 : Subtarget.hasP9Vector() ?
  bool PostRA = !MRI->isSSA();

  unsigned ToBeDeletedReg = 0;
  int64_t OffsetImm = 0;
  unsigned XFormOpcode = 0;

  bool OtherIntermediateUse = false;

  if (OtherIntermediateUse || !ADDMI)

  unsigned ScaleRegIdx = 0;
  int64_t OffsetAddi = 0;

  assert(ADDIMI && "There should be ADDIMI for valid ToBeChangedReg.");

  for (auto It = ++Start; It != End; It++)

      (ScaleReg == PPC::R0 || ScaleReg == PPC::X0))

  if (NewDefFor(ToBeChangedReg, *ADDMI, MI) || NewDefFor(ScaleReg, *ADDMI, MI))

  MI.setDesc(get(XFormOpcode));
      .ChangeToRegister(ScaleReg, false, false,
      .ChangeToRegister(ToBeChangedReg, false, false, true);
                                                 int64_t &Imm) const {
  if (Opc != PPC::ADDI && Opc != PPC::ADDI8)

  return Opc == PPC::ADD4 || Opc == PPC::ADD8;

                                                 unsigned &ToBeDeletedReg,
                                                 unsigned &XFormOpcode,
  if (!MI.mayLoadOrStore())

  unsigned Opc = MI.getOpcode();

  if (XFormOpcode == PPC::INSTRUCTION_LIST_END)

  if (!ImmOperand.isImm())

  assert(RegOperand.isReg() && "Instruction format is not right");

  if (!RegOperand.isKill())

  ToBeDeletedReg = RegOperand.getReg();
  OffsetImm = ImmOperand.getImm();
                                            int64_t &OffsetAddi,
                                            int64_t OffsetImm) const {
  assert((Index == 1 || Index == 2) && "Invalid operand index for add.");

  bool OtherIntermediateUse = false;

  if (OtherIntermediateUse || !ADDIMI)

  if (isInt<16>(OffsetAddi + OffsetImm))
  bool PostRA = !MRI->isSSA();
  bool SeenIntermediateUse = true;
  unsigned ForwardingOperand = ~0U;
                                            SeenIntermediateUse);

  assert(ForwardingOperand < MI.getNumOperands() &&
         "The forwarding operand needs to be valid at this point");
  bool IsForwardingOperandKilled = MI.getOperand(ForwardingOperand).isKill();
  bool KillFwdDefMI = !SeenIntermediateUse && IsForwardingOperandKilled;
  if (KilledDef && KillFwdDefMI)

          PPC::INSTRUCTION_LIST_END &&
      transformToNewImmFormFedByAdd(MI, *DefMI, ForwardingOperand))

  bool IsVFReg = MI.getOperand(0).isReg()

      transformToImmFormFedByAdd(MI, III, ForwardingOperand, *DefMI,

      transformToImmFormFedByLI(MI, III, ForwardingOperand, *DefMI))

  if (!HasImmForm && simplifyToLI(MI, *DefMI, ForwardingOperand, KilledDef))
  Register FoldingReg = MI.getOperand(1).getReg();

  if (SrcMI->getOpcode() != PPC::RLWINM &&
      SrcMI->getOpcode() != PPC::RLWINM_rec &&

  assert((MI.getOperand(2).isImm() && MI.getOperand(3).isImm() &&
         "Invalid PPC::RLWINM Instruction!");
  assert((MEMI < 32 && MESrc < 32 && MBMI < 32 && MBSrc < 32) &&
         "Invalid PPC::RLWINM Instruction!");

  bool SrcMaskFull = (MBSrc - MESrc == 1) || (MBSrc == 0 && MESrc == 31);

  if ((MBMI > MEMI) && !SrcMaskFull)

  APInt RotatedSrcMask = MaskSrc.rotl(SHMI);
  APInt FinalMask = RotatedSrcMask & MaskMI;

  bool Simplified = false;

  if (FinalMask.isZero()) {
        (MI.getOpcode() == PPC::RLWINM8 || MI.getOpcode() == PPC::RLWINM8_rec);

    if (MI.getOpcode() == PPC::RLWINM || MI.getOpcode() == PPC::RLWINM8) {
      MI.removeOperand(4);
      MI.removeOperand(3);
      MI.removeOperand(2);
      MI.getOperand(1).ChangeToImmediate(0);
      MI.setDesc(get(Is64Bit ? PPC::LI8 : PPC::LI));

      MI.removeOperand(4);
      MI.removeOperand(3);
      MI.getOperand(2).setImm(0);
      MI.setDesc(get(Is64Bit ? PPC::ANDI8_rec : PPC::ANDI_rec));

      MI.getOperand(1).setIsKill(true);

      MI.getOperand(1).setIsKill(false);
  uint16_t NewSH = (SHSrc + SHMI) % 32;
  MI.getOperand(2).setImm(NewSH);
  MI.getOperand(3).setImm(NewMB);
  MI.getOperand(4).setImm(NewME);

      MI.getOperand(1).setIsKill(true);

      MI.getOperand(1).setIsKill(false);

  if (Simplified & MRI->use_nodbg_empty(FoldingReg) &&
  default:
    return false;

    III.ImmOpcode = Opc == PPC::ADD4 ? PPC::ADDI : PPC::ADDI8;

    III.ImmOpcode = Opc == PPC::ADDC ? PPC::ADDIC : PPC::ADDIC8;

    III.ImmOpcode = Opc == PPC::SUBFC ? PPC::SUBFIC : PPC::SUBFIC8;

    III.ImmOpcode = Opc == PPC::CMPW ? PPC::CMPWI : PPC::CMPDI;

    III.ImmOpcode = Opc == PPC::CMPLW ? PPC::CMPLWI : PPC::CMPLDI;

  case PPC::OR: III.ImmOpcode = PPC::ORI; break;
  case PPC::OR8: III.ImmOpcode = PPC::ORI8; break;
  case PPC::XOR: III.ImmOpcode = PPC::XORI; break;
  case PPC::XOR8: III.ImmOpcode = PPC::XORI8; break;
  case PPC::RLWNM_rec:
  case PPC::RLWNM8_rec:

    if (Opc == PPC::RLWNM || Opc == PPC::RLWNM8 || Opc == PPC::RLWNM_rec ||
        Opc == PPC::RLWNM8_rec)

    case PPC::RLWNM: III.ImmOpcode = PPC::RLWINM; break;
    case PPC::RLWNM8: III.ImmOpcode = PPC::RLWINM8; break;
    case PPC::RLWNM_rec:
    case PPC::RLWNM8_rec:
    case PPC::SLW: III.ImmOpcode = PPC::RLWINM; break;
    case PPC::SLW8: III.ImmOpcode = PPC::RLWINM8; break;
    case PPC::SRW: III.ImmOpcode = PPC::RLWINM; break;
    case PPC::SRW8: III.ImmOpcode = PPC::RLWINM8; break;

  case PPC::RLDCL_rec:
  case PPC::RLDCR_rec:

    if (Opc == PPC::RLDCL || Opc == PPC::RLDCL_rec || Opc == PPC::RLDCR ||
        Opc == PPC::RLDCR_rec)

    case PPC::RLDCL: III.ImmOpcode = PPC::RLDICL; break;
    case PPC::RLDCL_rec:
    case PPC::RLDCR: III.ImmOpcode = PPC::RLDICR; break;
    case PPC::RLDCR_rec:
    case PPC::SLD: III.ImmOpcode = PPC::RLDICR; break;
    case PPC::SRD: III.ImmOpcode = PPC::RLDICL; break;
  case PPC::LBZX: III.ImmOpcode = PPC::LBZ; break;
  case PPC::LBZX8: III.ImmOpcode = PPC::LBZ8; break;
  case PPC::LHZX: III.ImmOpcode = PPC::LHZ; break;
  case PPC::LHZX8: III.ImmOpcode = PPC::LHZ8; break;
  case PPC::LHAX: III.ImmOpcode = PPC::LHA; break;
  case PPC::LHAX8: III.ImmOpcode = PPC::LHA8; break;
  case PPC::LWZX: III.ImmOpcode = PPC::LWZ; break;
  case PPC::LWZX8: III.ImmOpcode = PPC::LWZ8; break;

  case PPC::LFSX: III.ImmOpcode = PPC::LFS; break;
  case PPC::LFDX: III.ImmOpcode = PPC::LFD; break;
  case PPC::STBX: III.ImmOpcode = PPC::STB; break;
  case PPC::STBX8: III.ImmOpcode = PPC::STB8; break;
  case PPC::STHX: III.ImmOpcode = PPC::STH; break;
  case PPC::STHX8: III.ImmOpcode = PPC::STH8; break;
  case PPC::STWX: III.ImmOpcode = PPC::STW; break;
  case PPC::STWX8: III.ImmOpcode = PPC::STW8; break;

  case PPC::STFSX: III.ImmOpcode = PPC::STFS; break;
  case PPC::STFDX: III.ImmOpcode = PPC::STFD; break;

  case PPC::LBZUX: III.ImmOpcode = PPC::LBZU; break;
  case PPC::LBZUX8: III.ImmOpcode = PPC::LBZU8; break;
  case PPC::LHZUX: III.ImmOpcode = PPC::LHZU; break;
  case PPC::LHZUX8: III.ImmOpcode = PPC::LHZU8; break;
  case PPC::LHAUX: III.ImmOpcode = PPC::LHAU; break;
  case PPC::LHAUX8: III.ImmOpcode = PPC::LHAU8; break;
  case PPC::LWZUX: III.ImmOpcode = PPC::LWZU; break;
  case PPC::LWZUX8: III.ImmOpcode = PPC::LWZU8; break;

  case PPC::LFSUX: III.ImmOpcode = PPC::LFSU; break;
  case PPC::LFDUX: III.ImmOpcode = PPC::LFDU; break;
  case PPC::STBUX: III.ImmOpcode = PPC::STBU; break;
  case PPC::STBUX8: III.ImmOpcode = PPC::STBU8; break;
  case PPC::STHUX: III.ImmOpcode = PPC::STHU; break;
  case PPC::STHUX8: III.ImmOpcode = PPC::STHU8; break;
  case PPC::STWUX: III.ImmOpcode = PPC::STWU; break;
  case PPC::STWUX8: III.ImmOpcode = PPC::STWU8; break;

  case PPC::STFSUX: III.ImmOpcode = PPC::STFSU; break;
  case PPC::STFDUX: III.ImmOpcode = PPC::STFDU; break;
  case PPC::XFLOADf32:
  case PPC::XFLOADf64:
  case PPC::XFSTOREf32:
  case PPC::XFSTOREf64:
    if (!Subtarget.hasP9Vector())

    case PPC::XFLOADf32:
    case PPC::XFLOADf64:
    case PPC::XFSTOREf32:
    case PPC::XFSTOREf64:
  assert(Op1 != Op2 && "Cannot swap operand with itself.");

  unsigned MaxOp = std::max(Op1, Op2);
  unsigned MinOp = std::min(Op1, Op2);
  MI.removeOperand(std::max(Op1, Op2));
  MI.removeOperand(std::min(Op1, Op2));

  if (MaxOp - MinOp == 1 && MI.getNumOperands() == MinOp) {
    MI.addOperand(MOp2);
    MI.addOperand(MOp1);

    unsigned TotalOps = MI.getNumOperands() + 2;
    for (unsigned i = MI.getNumOperands() - 1; i >= MinOp; i--) {
      MI.removeOperand(i);

    MI.addOperand(MOp2);

    for (unsigned i = MI.getNumOperands(); i < TotalOps; i++) {
      MI.addOperand(MOp1);
      MI.addOperand(MOps.back());
                                              unsigned OpNoForForwarding

  unsigned Opc = DefMI.getOpcode();
  if (Opc != PPC::ADDItocL8 && Opc != PPC::ADDI && Opc != PPC::ADDI8)

  if (Opc == PPC::ADDItocL8 && Subtarget.isAIX())

         "Add inst must have at least three operands");
  RegMO = &DefMI.getOperand(1);
  ImmMO = &DefMI.getOperand(2);

  if (!RegMO->isReg())
bool PPCInstrInfo::isRegElgibleForForwarding(
    bool &IsFwdFeederRegKilled, bool &SeenIntermediateUse) const {

  for (; It != E; ++It) {
      IsFwdFeederRegKilled = true;
      SeenIntermediateUse = true;
    if ((&*It) == &DefMI)
bool PPCInstrInfo::isImmElgibleForForwarding(const MachineOperand &ImmMO,
                                             int64_t BaseImm) const {
  if (DefMI.getOpcode() == PPC::ADDItocL8) {

  if (ImmMO.isImm()) {
    APInt ActualValue(64, ImmMO.getImm() + BaseImm, true);

    Imm = SignExtend64<16>(ImmMO.getImm() + BaseImm);
                                  unsigned OpNoForForwarding,
  if ((DefMI.getOpcode() != PPC::LI && DefMI.getOpcode() != PPC::LI8) ||
      !DefMI.getOperand(1).isImm())

  int64_t Immediate = DefMI.getOperand(1).getImm();
  int64_t SExtImm = SignExtend64<16>(Immediate);

  bool ReplaceWithLI = false;
  bool Is64BitLI = false;

  unsigned Opc = MI.getOpcode();
    bool Changed = false;

    int64_t Comparand = MI.getOperand(2).getImm();
    int64_t SExtComparand = ((uint64_t)Comparand & ~0x7FFFuLL) != 0
                                ? (Comparand | 0xFFFFFFFFFFFF0000)

    for (auto &CompareUseMI : MRI->use_instructions(DefReg)) {
      unsigned UseOpc = CompareUseMI.getOpcode();
      if (UseOpc != PPC::ISEL && UseOpc != PPC::ISEL8)
      unsigned CRSubReg = CompareUseMI.getOperand(3).getSubReg();
      Register TrueReg = CompareUseMI.getOperand(1).getReg();
      Register FalseReg = CompareUseMI.getOperand(2).getReg();
      unsigned RegToCopy =
          selectReg(SExtImm, SExtComparand, Opc, TrueReg, FalseReg, CRSubReg);
      if (RegToCopy == PPC::NoRegister)

      if (RegToCopy == PPC::ZERO || RegToCopy == PPC::ZERO8) {
        CompareUseMI.setDesc(get(UseOpc == PPC::ISEL8 ? PPC::LI8 : PPC::LI));
        CompareUseMI.removeOperand(3);
        CompareUseMI.removeOperand(2);

      LLVM_DEBUG(
          dbgs() << "Found LI -> CMPI -> ISEL, replacing with a copy.\n");

      CompareUseMI.setDesc(get(PPC::COPY));
      CompareUseMI.removeOperand(3);
      CompareUseMI.removeOperand(RegToCopy == TrueReg ? 2 : 1);
      CmpIselsConverted++;

    MissedConvertibleImmediateInstrs++;
    int64_t Addend = MI.getOperand(2).getImm();
    if (isInt<16>(Addend + SExtImm)) {
      ReplaceWithLI = true;
      Is64BitLI = Opc == PPC::ADDI8;
      NewImm = Addend + SExtImm;

  case PPC::SUBFIC8: {
    if (MI.getNumOperands() > 3 && !MI.getOperand(3).isDead())
    int64_t Minuend = MI.getOperand(2).getImm();
    if (isInt<16>(Minuend - SExtImm)) {
      ReplaceWithLI = true;
      Is64BitLI = Opc == PPC::SUBFIC8;
      NewImm = Minuend - SExtImm;

  case PPC::RLDICL_rec:
  case PPC::RLDICL_32:
  case PPC::RLDICL_32_64: {
    int64_t SH = MI.getOperand(2).getImm();
    int64_t MB = MI.getOperand(3).getImm();
    APInt InVal((Opc == PPC::RLDICL || Opc == PPC::RLDICL_rec) ? 64 : 32,
    InVal = InVal.rotl(SH);

    if (isUInt<15>(InVal.getSExtValue()) ||
        (Opc == PPC::RLDICL_rec && isUInt<16>(InVal.getSExtValue()))) {
      ReplaceWithLI = true;
      Is64BitLI = Opc != PPC::RLDICL_32;
      NewImm = InVal.getSExtValue();
      SetCR = Opc == PPC::RLDICL_rec;

  case PPC::RLWINM_rec:
  case PPC::RLWINM8_rec: {
    int64_t SH = MI.getOperand(2).getImm();
    int64_t MB = MI.getOperand(3).getImm();
    int64_t ME = MI.getOperand(4).getImm();
    APInt InVal(32, SExtImm, true);
    InVal = InVal.rotl(SH);

    bool ValueFits = isUInt<15>(InVal.getSExtValue());
    ValueFits |= ((Opc == PPC::RLWINM_rec || Opc == PPC::RLWINM8_rec) &&