#define DEBUG_TYPE "ppc-instr-info"

#define GET_INSTRMAP_INFO
#define GET_INSTRINFO_CTOR_DTOR
#include "PPCGenInstrInfo.inc"

STATISTIC(NumStoreSPILLVSRRCAsVec,
          "Number of spillvsrrc spilled to stack as vec");
STATISTIC(NumStoreSPILLVSRRCAsGpr,
          "Number of spillvsrrc spilled to stack as gpr");
STATISTIC(NumGPRtoVSRSpill, "Number of gpr spills to spillvsrrc");
STATISTIC(CmpIselsConverted,
          "Number of ISELs that depend on comparison of constants converted");
STATISTIC(MissedConvertibleImmediateInstrs,
          "Number of compare-immediate instructions fed by constants");
STATISTIC(NumRcRotatesConvertedToRcAnd,
          "Number of record-form rotates converted to record-form andi");
    cl::desc("Disable analysis for CTR loops"));

    cl::desc("Causes the backend to crash instead of generating a nop VSX copy"),

    cl::desc("Use the old (incorrect) instruction latency calculation"));

    cl::desc("Register pressure factor for the transformations."));

    cl::desc("Enable register pressure reduction in the machine combiner pass."));
void PPCInstrInfo::anchor() {}

                      STI.isPPC64() ? PPC::BLR8 : PPC::BLR),
      Subtarget(STI), RI(STI.getTargetMachine()) {}

      static_cast<const PPCSubtarget *>(STI)->getCPUDirective();

      static_cast<const PPCSubtarget *>(STI)->getInstrItineraryData();
                                         unsigned *PredCost) const {
  return PPCGenInstrInfo::getInstrLatency(ItinData, MI, PredCost);

  unsigned DefClass = MI.getDesc().getSchedClass();
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {

  std::optional<unsigned> Latency = PPCGenInstrInfo::getOperandLatency(

  if (!DefMI.getParent())

  if (Reg.isVirtual()) {
    IsRegCR = MRI->getRegClass(Reg)->hasSuperClassEq(&PPC::CRRCRegClass) ||
              MRI->getRegClass(Reg)->hasSuperClassEq(&PPC::CRBITRCRegClass);
  } else
    IsRegCR = PPC::CRRCRegClass.contains(Reg) ||
              PPC::CRBITRCRegClass.contains(Reg);

  if (UseMI.isBranch() && IsRegCR) {
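// The InfoArrayIdx* constants below index the columns of the FMA opcode table:
// each row lists an FMA opcode together with its corresponding add, multiply
// and subtract opcodes, plus the operand indices of the addend and of the
// first multiplicand for that FMA form.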
#define InfoArrayIdxFMAInst 0
#define InfoArrayIdxFAddInst 1
#define InfoArrayIdxFMULInst 2
#define InfoArrayIdxAddOpIdx 3
#define InfoArrayIdxMULOpIdx 4
#define InfoArrayIdxFSubInst 5

    {PPC::XSMADDADP, PPC::XSADDDP, PPC::XSMULDP, 1, 2, PPC::XSSUBDP},
    {PPC::XSMADDASP, PPC::XSADDSP, PPC::XSMULSP, 1, 2, PPC::XSSUBSP},
    {PPC::XVMADDADP, PPC::XVADDDP, PPC::XVMULDP, 1, 2, PPC::XVSUBDP},
    {PPC::XVMADDASP, PPC::XVADDSP, PPC::XVMULSP, 1, 2, PPC::XVSUBSP},
    {PPC::FMADD, PPC::FADD, PPC::FMUL, 3, 1, PPC::FSUB},
    {PPC::FMADDS, PPC::FADDS, PPC::FMULS, 3, 1, PPC::FSUBS}};
int16_t PPCInstrInfo::getFMAOpIdxInfo(unsigned Opcode) const {

                                       bool DoRegPressureReduce) const {

  auto IsAllOpsVirtualReg = [](const MachineInstr &Instr) {
    for (const auto &MO : Instr.explicit_operands())
      if (!(MO.isReg() && MO.getReg().isVirtual()))

  auto IsReassociableAddOrSub = [&](const MachineInstr &Instr,
    if (Instr.getOpcode() !=
    if (!IsAllOpsVirtualReg(Instr))
        !MRI->hasOneNonDBGUse(Instr.getOperand(0).getReg()))

  auto IsReassociableFMA = [&](const MachineInstr &Instr, int16_t &AddOpIdx,
                               int16_t &MulOpIdx, bool IsLeaf) {
    int16_t Idx = getFMAOpIdxInfo(Instr.getOpcode());
    if (!IsAllOpsVirtualReg(Instr))
  int16_t AddOpIdx = -1;
  int16_t MulOpIdx = -1;

  bool IsUsedOnceL = false;
  bool IsUsedOnceR = false;

  auto IsRPReductionCandidate = [&]() {
    if (Opcode != PPC::XSMADDASP && Opcode != PPC::XSMADDADP)
    if (IsReassociableFMA(Root, AddOpIdx, MulOpIdx, true)) {
      assert((MulOpIdx >= 0) && "mul operand index not right!");
      Register MULRegL = TRI->lookThruSingleUseCopyChain(
      Register MULRegR = TRI->lookThruSingleUseCopyChain(
      if (!MULRegL && !MULRegR)
      if (MULRegL && !MULRegR) {
      } else if (!MULRegL && MULRegR) {

      MULInstrL = MRI->getVRegDef(MULRegL);
      MULInstrR = MRI->getVRegDef(MULRegR);
  if (DoRegPressureReduce && IsRPReductionCandidate()) {
    assert((MULInstrL && MULInstrR) &&
           "wrong register pressure reduction candidate!");

  if (!IsReassociableFMA(Root, AddOpIdx, MulOpIdx, false))
  assert((AddOpIdx >= 0) && "add operand index not right!");

  if (!IsReassociableFMA(*Prev, AddOpIdx, MulOpIdx, false))
  assert((AddOpIdx >= 0) && "add operand index not right!");

  if (IsReassociableFMA(*Leaf, AddOpIdx, MulOpIdx, true)) {

  assert(!InsInstrs.empty() &&
         "Instructions set to be inserted is empty!");
  assert(isa<llvm::ConstantFP>(C) && "not a valid constant!");

  APFloat F1((dyn_cast<ConstantFP>(C))->getValueAPF());

  Constant *NegC = ConstantFP::get(dyn_cast<ConstantFP>(C)->getContext(), F1);

  for (auto *Inst : InsInstrs) {
      assert(Operand.isReg() && "Invalid instruction in InsInstrs!");
      if (Operand.getReg() == PPC::ZERO8) {
        Placeholder = &Operand;

  assert(Placeholder && "Placeholder does not exist!");

      generateLoadForNewConst(ConstPoolIdx, &Root, C->getType(), InsInstrs);

  Placeholder->setReg(LoadNewConst);
  if (!(Subtarget.isPPC64() && Subtarget.hasP9Vector() &&

  auto GetMBBPressure =

        if (MI.isDebugValue() || MI.isDebugLabel())

        RPTracker.recede(RegOpers);

  unsigned VSSRCLimit = TRI->getRegPressureSetLimit(

  return GetMBBPressure(MBB)[PPC::RegisterPressureSets::VSSRC] >

  if (!I->hasOneMemOperand())

  return Op->isLoad() && Op->getPseudoValue() &&
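// generateLoadForNewConst materializes a newly created constant-pool entry
// (float or double only): it builds an ADDIStocHA8 of the TOC entry into a
// fresh G8RC_NOX0 register and then a D-form load (DFLOADf32/DFLOADf64) from
// it, returning the register that holds the loaded constant.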
Register PPCInstrInfo::generateLoadForNewConst(

         "Target not supported!\n");

  Register VReg1 = MRI->createVirtualRegister(&PPC::G8RC_and_G8RC_NOX0RegClass);

      BuildMI(*MF, MI->getDebugLoc(), get(PPC::ADDIStocHA8), VReg1)

         "Only float and double are supported!");

    LoadOpcode = PPC::DFLOADf32;

    LoadOpcode = PPC::DFLOADf64;

  assert(I->mayLoad() && "Should be a load instruction.\n");
  for (auto MO : I->uses()) {

    if (Reg == 0 || !Reg.isVirtual())

  return (MCP->getConstants())[MO2.getIndex()].Val.ConstVal;
                                         bool DoRegPressureReduce) const {

                                        DoRegPressureReduce);

    reassociateFMA(Root, Pattern, InsInstrs, DelInstrs, InstrIdxForVirtReg);

                                       DelInstrs, InstrIdxForVirtReg);
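// reassociateFMA rewrites a chain of dependent FMAs into a shallower tree:
// the multiply and addend operands of the root, its predecessor and the leaf
// FMA are collected (via the GetFMAInstrInfo/GetOperandInfo helpers below),
// new virtual registers are created for the intermediate products, and the
// operand order of the rebuilt instructions is fixed up with
// AdjustOperandOrder.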
void PPCInstrInfo::reassociateFMA(

  MRI.constrainRegClass(RegC, RC);

  int16_t Idx = getFMAOpIdxInfo(FmaOp);
  assert(Idx >= 0 && "Root must be a FMA instruction");

  bool IsILPReassociate =

    Leaf = MRI.getVRegDef(MULReg);

    Leaf = MRI.getVRegDef(MULReg);

  if (IsILPReassociate)

    MRI.constrainRegClass(Reg, RC);
    KillFlag = Operand.isKill();

                             bool &MulOp1KillFlag, bool &MulOp2KillFlag,
                             bool &AddOpKillFlag) {
    GetOperandInfo(Instr.getOperand(FirstMulOpIdx), MulOp1, MulOp1KillFlag);
    GetOperandInfo(Instr.getOperand(FirstMulOpIdx + 1), MulOp2, MulOp2KillFlag);
    GetOperandInfo(Instr.getOperand(AddOpIdx), AddOp, AddOpKillFlag);
  Register RegM11, RegM12, RegX, RegY, RegM21, RegM22, RegM31, RegM32, RegA11,

  bool KillX = false, KillY = false, KillM11 = false, KillM12 = false,
       KillM21 = false, KillM22 = false, KillM31 = false, KillM32 = false,
       KillA11 = false, KillA21 = false, KillB = false;

  GetFMAInstrInfo(Root, RegM31, RegM32, RegB, KillM31, KillM32, KillB);

  if (IsILPReassociate)
    GetFMAInstrInfo(*Prev, RegM21, RegM22, RegA21, KillM21, KillM22, KillA21);

    GetFMAInstrInfo(*Leaf, RegM11, RegM12, RegA11, KillM11, KillM12, KillA11);
    GetOperandInfo(Leaf->getOperand(AddOpIdx), RegX, KillX);

    GetOperandInfo(Leaf->getOperand(1), RegX, KillX);
    GetOperandInfo(Leaf->getOperand(2), RegY, KillY);

    GetOperandInfo(Leaf->getOperand(1), RegX, KillX);
    GetOperandInfo(Leaf->getOperand(2), RegY, KillY);

  InstrIdxForVirtReg.insert(std::make_pair(NewVRA, 0));

  if (IsILPReassociate) {
    NewVRB = MRI.createVirtualRegister(RC);
    InstrIdxForVirtReg.insert(std::make_pair(NewVRB, 1));

    NewVRD = MRI.createVirtualRegister(RC);
    InstrIdxForVirtReg.insert(std::make_pair(NewVRD, 2));
                                Register RegMul2, bool KillRegMul2) {
    MI->getOperand(AddOpIdx).setReg(RegAdd);
    MI->getOperand(AddOpIdx).setIsKill(KillAdd);
    MI->getOperand(FirstMulOpIdx).setReg(RegMul1);
    MI->getOperand(FirstMulOpIdx).setIsKill(KillRegMul1);
    MI->getOperand(FirstMulOpIdx + 1).setReg(RegMul2);
    MI->getOperand(FirstMulOpIdx + 1).setIsKill(KillRegMul2);

      AdjustOperandOrder(MINewB, RegX, KillX, RegM21, KillM21, RegM22, KillM22);
      AdjustOperandOrder(MINewA, RegY, KillY, RegM31, KillM31, RegM32, KillM32);

    assert(NewVRD && "new FMA register not created!");

      AdjustOperandOrder(MINewB, RegX, KillX, RegM21, KillM21, RegM22, KillM22);
      AdjustOperandOrder(MINewD, NewVRA, true, RegM31, KillM31, RegM32,

    bool KillVarReg = false;
      KillVarReg = KillM31;
      KillVarReg = KillM32;

  if (!IsILPReassociate) {

         "Insertion instructions set should not be empty!");

  if (IsILPReassociate)
                                         unsigned &SubIdx) const {
  switch (MI.getOpcode()) {
  default: return false;
  case PPC::EXTSW_32_64:
    SrcReg = MI.getOperand(1).getReg();
    DstReg = MI.getOperand(0).getReg();
    SubIdx = PPC::sub_32;

                                           int &FrameIndex) const {
  if (MI.getOperand(1).isImm() && !MI.getOperand(1).getImm() &&
      MI.getOperand(2).isFI()) {
    FrameIndex = MI.getOperand(2).getIndex();
    return MI.getOperand(0).getReg();
  switch (MI.getOpcode()) {
  case PPC::ADDIStocHA:
  case PPC::ADDIStocHA8:
  case PPC::ADDItocL8:
  case PPC::LOAD_STACK_GUARD:
  case PPC::PPCLdFixedAddr:
  case PPC::XXLXORspz:
  case PPC::XXLXORdpz:
  case PPC::XXLEQVOnes:
  case PPC::XXSPLTI32DX:
  case PPC::XXSPLTIDP:
  case PPC::V_SETALLONESB:
  case PPC::V_SETALLONESH:
  case PPC::V_SETALLONES:
  case PPC::XXSETACCZ:
  case PPC::XXSETACCZW:

                                          int &FrameIndex) const {
  if (MI.getOperand(1).isImm() && !MI.getOperand(1).getImm() &&
      MI.getOperand(2).isFI()) {
    FrameIndex = MI.getOperand(2).getIndex();
    return MI.getOperand(0).getReg();
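// Commuting RLWIMI/RLWIMI_rec: the rotated source and the tied accumulator
// operand can only be swapped when the shift amount (operand 3) is zero; the
// swap is then expressed by complementing the mask, i.e. the new mask-begin
// becomes ME + 1 and the new mask-end becomes MB - 1 (modulo 32), which is
// what the setImm calls at the end of this function do.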
                                                   unsigned OpIdx2) const {
  if (MI.getOpcode() != PPC::RLWIMI && MI.getOpcode() != PPC::RLWIMI_rec)

  if (MI.getOperand(3).getImm() != 0)

  assert(((OpIdx1 == 1 && OpIdx2 == 2) || (OpIdx1 == 2 && OpIdx2 == 1)) &&
         "Only the operands 1 and 2 can be swapped in RLWIMI/RLWIMI_rec.");

  unsigned SubReg1 = MI.getOperand(1).getSubReg();
  unsigned SubReg2 = MI.getOperand(2).getSubReg();
  bool Reg1IsKill = MI.getOperand(1).isKill();
  bool Reg2IsKill = MI.getOperand(2).isKill();
  bool ChangeReg0 = false;

         "Expecting a two-address instruction!");
  assert(MI.getOperand(0).getSubReg() == SubReg1 && "Tied subreg mismatch");

  unsigned MB = MI.getOperand(4).getImm();
  unsigned ME = MI.getOperand(5).getImm();

  if (MB == 0 && ME == 31)

    Register Reg0 = ChangeReg0 ? Reg2 : MI.getOperand(0).getReg();
    bool Reg0IsDead = MI.getOperand(0).isDead();
    return BuildMI(MF, MI.getDebugLoc(), MI.getDesc())

    MI.getOperand(0).setReg(Reg2);
    MI.getOperand(0).setSubReg(SubReg2);
  MI.getOperand(2).setReg(Reg1);
  MI.getOperand(1).setReg(Reg2);
  MI.getOperand(2).setSubReg(SubReg1);
  MI.getOperand(1).setSubReg(SubReg2);
  MI.getOperand(2).setIsKill(Reg1IsKill);
  MI.getOperand(1).setIsKill(Reg2IsKill);

  MI.getOperand(4).setImm((ME + 1) & 31);
  MI.getOperand(5).setImm((MB - 1) & 31);
                                         unsigned &SrcOpIdx1,
                                         unsigned &SrcOpIdx2) const {

  return fixCommutedOpIndices(SrcOpIdx1, SrcOpIdx2, 2, 3);

  default: Opcode = PPC::NOP; break;
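// analyzeBranch walks the terminators of the block from the end, recognizing
// unconditional branches (B), conditional branches (BCC/BC/BCn) and the
// CTR-decrementing forms (BDNZ/BDZ and their 64-bit variants), and fills in
// TBB/FBB/Cond accordingly; with AllowModify it may delete branches that have
// become trivially dead.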
                                 bool AllowModify) const {
  bool isPPC64 = Subtarget.isPPC64();

  if (!isUnpredicatedTerminator(*I))

  if (I->getOpcode() == PPC::B &&

      I->eraseFromParent();

  if (I == MBB.end() || !isUnpredicatedTerminator(*I))

  if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
  } else if (LastInst.getOpcode() == PPC::BCC) {
  } else if (LastInst.getOpcode() == PPC::BC) {
  } else if (LastInst.getOpcode() == PPC::BCn) {
  } else if (LastInst.getOpcode() == PPC::BDNZ8 ||
  } else if (LastInst.getOpcode() == PPC::BDZ8 ||

  if (I != MBB.begin() && isUnpredicatedTerminator(*--I))

  if (SecondLastInst.getOpcode() == PPC::BCC &&
  } else if (SecondLastInst.getOpcode() == PPC::BC &&
  } else if (SecondLastInst.getOpcode() == PPC::BCn &&
  } else if ((SecondLastInst.getOpcode() == PPC::BDNZ8 ||
              SecondLastInst.getOpcode() == PPC::BDNZ) &&
  } else if ((SecondLastInst.getOpcode() == PPC::BDZ8 ||
              SecondLastInst.getOpcode() == PPC::BDZ) &&

    I->eraseFromParent();
                                    int *BytesRemoved) const {
  assert(!BytesRemoved && "code size not handled");

  if (I->getOpcode() != PPC::B && I->getOpcode() != PPC::BCC &&
      I->getOpcode() != PPC::BC && I->getOpcode() != PPC::BCn &&
      I->getOpcode() != PPC::BDNZ8 && I->getOpcode() != PPC::BDNZ &&
      I->getOpcode() != PPC::BDZ8 && I->getOpcode() != PPC::BDZ)

  I->eraseFromParent();

  if (I->getOpcode() != PPC::BCC &&
      I->getOpcode() != PPC::BC && I->getOpcode() != PPC::BCn &&
      I->getOpcode() != PPC::BDNZ8 && I->getOpcode() != PPC::BDNZ &&
      I->getOpcode() != PPC::BDZ8 && I->getOpcode() != PPC::BDZ)

  I->eraseFromParent();
                                    int *BytesAdded) const {

  assert(TBB && "insertBranch must not be told to insert a fallthrough");
         "PPC branch conditions have two components!");
  assert(!BytesAdded && "code size not handled");

  bool isPPC64 = Subtarget.isPPC64();

                            (isPPC64 ? PPC::BDNZ8 : PPC::BDNZ) :
                            (isPPC64 ? PPC::BDZ8 : PPC::BDZ))).addMBB(TBB);

                            (isPPC64 ? PPC::BDNZ8 : PPC::BDNZ) :
                            (isPPC64 ? PPC::BDZ8 : PPC::BDZ))).addMBB(TBB);
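// canInsertSelect/insertSelect: the ISEL-based select is only offered for the
// regular integer GPR classes; the CR field subregister (eq/lt/gt/un) and a
// possible operand swap are derived from the branch predicate, and the first
// (true) operand is copied into a NOR0/NOX0-constrained register if its class
// could contain R0/X0, presumably because ISEL reads r0 as the literal value 0.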
                                Register FalseReg, int &CondCycles,
                                int &TrueCycles, int &FalseCycles) const {
  if (!Subtarget.hasISEL())

  if (Cond.size() != 2)

      RI.getCommonSubClass(MRI.getRegClass(TrueReg), MRI.getRegClass(FalseReg));

  if (!PPC::GPRCRegClass.hasSubClassEq(RC) &&
      !PPC::GPRC_NOR0RegClass.hasSubClassEq(RC) &&
      !PPC::G8RCRegClass.hasSubClassEq(RC) &&
      !PPC::G8RC_NOX0RegClass.hasSubClassEq(RC))

         "PPC branch conditions have two components!");

      RI.getCommonSubClass(MRI.getRegClass(TrueReg), MRI.getRegClass(FalseReg));
  assert(RC && "TrueReg and FalseReg must have overlapping register classes");

  bool Is64Bit = PPC::G8RCRegClass.hasSubClassEq(RC) ||
                 PPC::G8RC_NOX0RegClass.hasSubClassEq(RC);
          PPC::GPRCRegClass.hasSubClassEq(RC) ||
          PPC::GPRC_NOR0RegClass.hasSubClassEq(RC)) &&
         "isel is for regular integer GPRs only");

  unsigned OpCode = Is64Bit ? PPC::ISEL8 : PPC::ISEL;

  unsigned SubIdx = 0;
  bool SwapOps = false;
  switch (SelectPred) {
    SubIdx = PPC::sub_eq; SwapOps = false; break;
    SubIdx = PPC::sub_eq; SwapOps = true; break;
    SubIdx = PPC::sub_lt; SwapOps = false; break;
    SubIdx = PPC::sub_lt; SwapOps = true; break;
    SubIdx = PPC::sub_gt; SwapOps = false; break;
    SubIdx = PPC::sub_gt; SwapOps = true; break;
    SubIdx = PPC::sub_un; SwapOps = false; break;
    SubIdx = PPC::sub_un; SwapOps = true; break;

  Register FirstReg = SwapOps ? FalseReg : TrueReg,
           SecondReg = SwapOps ? TrueReg : FalseReg;

  if (MRI.getRegClass(FirstReg)->contains(PPC::R0) ||
      MRI.getRegClass(FirstReg)->contains(PPC::X0)) {
        MRI.getRegClass(FirstReg)->contains(PPC::X0) ?
            &PPC::G8RC_NOX0RegClass : &PPC::GPRC_NOR0RegClass;
    FirstReg = MRI.createVirtualRegister(FirstRC);
  if (CRBit == PPC::CR0LT || CRBit == PPC::CR1LT ||
      CRBit == PPC::CR2LT || CRBit == PPC::CR3LT ||
      CRBit == PPC::CR4LT || CRBit == PPC::CR5LT ||
      CRBit == PPC::CR6LT || CRBit == PPC::CR7LT)
  if (CRBit == PPC::CR0GT || CRBit == PPC::CR1GT ||
      CRBit == PPC::CR2GT || CRBit == PPC::CR3GT ||
      CRBit == PPC::CR4GT || CRBit == PPC::CR5GT ||
      CRBit == PPC::CR6GT || CRBit == PPC::CR7GT)
  if (CRBit == PPC::CR0EQ || CRBit == PPC::CR1EQ ||
      CRBit == PPC::CR2EQ || CRBit == PPC::CR3EQ ||
      CRBit == PPC::CR4EQ || CRBit == PPC::CR5EQ ||
      CRBit == PPC::CR6EQ || CRBit == PPC::CR7EQ)
  if (CRBit == PPC::CR0UN || CRBit == PPC::CR1UN ||
      CRBit == PPC::CR2UN || CRBit == PPC::CR3UN ||
      CRBit == PPC::CR4UN || CRBit == PPC::CR5UN ||
      CRBit == PPC::CR6UN || CRBit == PPC::CR7UN)

  assert(Ret != 4 && "Invalid CR bit register");
  if (PPC::F8RCRegClass.contains(DestReg) &&
      PPC::VSRCRegClass.contains(SrcReg)) {
        TRI->getMatchingSuperReg(DestReg, PPC::sub_64, &PPC::VSRCRegClass);
  } else if (PPC::F8RCRegClass.contains(SrcReg) &&
             PPC::VSRCRegClass.contains(DestReg)) {
        TRI->getMatchingSuperReg(SrcReg, PPC::sub_64, &PPC::VSRCRegClass);

  if (PPC::CRBITRCRegClass.contains(SrcReg) &&
      PPC::GPRCRegClass.contains(DestReg)) {
  } else if (PPC::CRRCRegClass.contains(SrcReg) &&
             (PPC::G8RCRegClass.contains(DestReg) ||
              PPC::GPRCRegClass.contains(DestReg))) {
    bool Is64Bit = PPC::G8RCRegClass.contains(DestReg);
    unsigned MvCode = Is64Bit ? PPC::MFOCRF8 : PPC::MFOCRF;
    unsigned ShCode = Is64Bit ? PPC::RLWINM8 : PPC::RLWINM;
    unsigned CRNum = TRI->getEncodingValue(SrcReg);
  } else if (PPC::G8RCRegClass.contains(SrcReg) &&
             PPC::VSFRCRegClass.contains(DestReg)) {
    assert(Subtarget.hasDirectMove() &&
           "Subtarget doesn't support directmove, don't know how to copy.");
  } else if (PPC::VSFRCRegClass.contains(SrcReg) &&
             PPC::G8RCRegClass.contains(DestReg)) {
    assert(Subtarget.hasDirectMove() &&
           "Subtarget doesn't support directmove, don't know how to copy.");
  } else if (PPC::SPERCRegClass.contains(SrcReg) &&
             PPC::GPRCRegClass.contains(DestReg)) {
  } else if (PPC::GPRCRegClass.contains(SrcReg) &&
             PPC::SPERCRegClass.contains(DestReg)) {

  if (PPC::GPRCRegClass.contains(DestReg, SrcReg))
  else if (PPC::G8RCRegClass.contains(DestReg, SrcReg))
  else if (PPC::F4RCRegClass.contains(DestReg, SrcReg))
  else if (PPC::CRRCRegClass.contains(DestReg, SrcReg))
  else if (PPC::VRRCRegClass.contains(DestReg, SrcReg))
  else if (PPC::VSRCRegClass.contains(DestReg, SrcReg))
  else if (PPC::VSFRCRegClass.contains(DestReg, SrcReg) ||
           PPC::VSSRCRegClass.contains(DestReg, SrcReg))
    Opc = (Subtarget.hasP9Vector()) ? PPC::XSCPSGNDP : PPC::XXLORf;
  else if (Subtarget.pairedVectorMemops() &&
           PPC::VSRpRCRegClass.contains(DestReg, SrcReg)) {
    if (SrcReg > PPC::VSRp15)
      SrcReg = PPC::V0 + (SrcReg - PPC::VSRp16) * 2;
    else
      SrcReg = PPC::VSL0 + (SrcReg - PPC::VSRp0) * 2;
    if (DestReg > PPC::VSRp15)
      DestReg = PPC::V0 + (DestReg - PPC::VSRp16) * 2;
    else
      DestReg = PPC::VSL0 + (DestReg - PPC::VSRp0) * 2;
  else if (PPC::CRBITRCRegClass.contains(DestReg, SrcReg))
  else if (PPC::SPERCRegClass.contains(DestReg, SrcReg))
  else if ((PPC::ACCRCRegClass.contains(DestReg) ||
            PPC::UACCRCRegClass.contains(DestReg)) &&
           (PPC::ACCRCRegClass.contains(SrcReg) ||
            PPC::UACCRCRegClass.contains(SrcReg))) {
    bool DestPrimed = PPC::ACCRCRegClass.contains(DestReg);
    bool SrcPrimed = PPC::ACCRCRegClass.contains(SrcReg);
        PPC::VSL0 + (SrcReg - (SrcPrimed ? PPC::ACC0 : PPC::UACC0)) * 4;
        PPC::VSL0 + (DestReg - (DestPrimed ? PPC::ACC0 : PPC::UACC0)) * 4;
    if (SrcPrimed && !KillSrc)
  } else if (PPC::G8pRCRegClass.contains(DestReg) &&
             PPC::G8pRCRegClass.contains(SrcReg)) {
    unsigned DestRegIdx = DestReg - PPC::G8p0;
    MCRegister DestRegSub0 = PPC::X0 + 2 * DestRegIdx;
    MCRegister DestRegSub1 = PPC::X0 + 2 * DestRegIdx + 1;
    unsigned SrcRegIdx = SrcReg - PPC::G8p0;
    MCRegister SrcRegSub0 = PPC::X0 + 2 * SrcRegIdx;
    MCRegister SrcRegSub1 = PPC::X0 + 2 * SrcRegIdx + 1;
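// Spill opcode selection: each register class maps to one slot of the
// OpcodesForSpill table (indexed via getSpillIndex), and the classes that
// only exist when paired vector memops are available (ACC/UACC/WACC/VSRp)
// assert that the feature is enabled before handing out an opcode.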
  if (PPC::GPRCRegClass.hasSubClassEq(RC) ||
      PPC::GPRC_NOR0RegClass.hasSubClassEq(RC)) {
  } else if (PPC::G8RCRegClass.hasSubClassEq(RC) ||
             PPC::G8RC_NOX0RegClass.hasSubClassEq(RC)) {
  } else if (PPC::F8RCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::F4RCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::SPERCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::CRRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::CRBITRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::VRRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::VSRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::VSFRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::VSSRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::SPILLTOVSRRCRegClass.hasSubClassEq(RC)) {
  } else if (PPC::ACCRCRegClass.hasSubClassEq(RC)) {
    assert(Subtarget.pairedVectorMemops() &&
           "Register unexpected when paired memops are disabled.");
  } else if (PPC::UACCRCRegClass.hasSubClassEq(RC)) {
    assert(Subtarget.pairedVectorMemops() &&
           "Register unexpected when paired memops are disabled.");
  } else if (PPC::WACCRCRegClass.hasSubClassEq(RC)) {
    assert(Subtarget.pairedVectorMemops() &&
           "Register unexpected when paired memops are disabled.");
  } else if (PPC::VSRpRCRegClass.hasSubClassEq(RC)) {
    assert(Subtarget.pairedVectorMemops() &&
           "Register unexpected when paired memops are disabled.");
  } else if (PPC::G8pRCRegClass.hasSubClassEq(RC)) {

  return OpcodesForSpill[getSpillIndex(RC)];

  return OpcodesForSpill[getSpillIndex(RC)];

void PPCInstrInfo::StoreRegToStackSlot(

  if (PPC::CRRCRegClass.hasSubClassEq(RC) ||
      PPC::CRBITRCRegClass.hasSubClassEq(RC))

  StoreRegToStackSlot(MF, SrcReg, isKill, FrameIdx, RC, NewMIs);

  NewMIs.back()->addMemOperand(MF, MMO);

                                        unsigned DestReg, int FrameIdx,

  LoadRegFromStackSlot(MF, DL, DestReg, FrameIdx, RC, NewMIs);

  NewMIs.back()->addMemOperand(MF, MMO);
  assert(Cond.size() == 2 && "Invalid PPC branch opcode!");
  Cond[0].setImm(Cond[0].getImm() == 0 ? 1 : 0);

  unsigned DefOpc = DefMI.getOpcode();
  if (DefOpc != PPC::LI && DefOpc != PPC::LI8)
  if (!DefMI.getOperand(1).isImm())
  if (DefMI.getOperand(1).getImm() != 0)

  for (UseIdx = 0; UseIdx < UseMI.getNumOperands(); ++UseIdx)
    if (UseMI.getOperand(UseIdx).isReg() &&
  assert(UseIdx < UseMI.getNumOperands() && "Cannot find Reg in UseMI");

  if (UseInfo->RegClass != PPC::GPRC_NOR0RegClassID &&
      UseInfo->RegClass != PPC::G8RC_NOX0RegClassID)

    bool isPPC64 = Subtarget.isPPC64();
    ZeroReg = isPPC64 ? PPC::ZERO8 : PPC::ZERO;
    ZeroReg = UseInfo->RegClass == PPC::G8RC_NOX0RegClassID ?
              PPC::ZERO8 : PPC::ZERO;

  UseMI.getOperand(UseIdx).setReg(ZeroReg);

  if (MRI->use_nodbg_empty(Reg))
    DefMI.eraseFromParent();

  if (MI.definesRegister(PPC::CTR, nullptr) ||
      MI.definesRegister(PPC::CTR8, nullptr))
                                    unsigned NumT, unsigned ExtraT,
                                    unsigned NumF, unsigned ExtraF,

  switch (MI.getOpcode()) {

  unsigned OpC = MI.getOpcode();
  if (OpC == PPC::BLR || OpC == PPC::BLR8) {
    if (Pred[1].getReg() == PPC::CTR8 || Pred[1].getReg() == PPC::CTR) {
      bool isPPC64 = Subtarget.isPPC64();
      MI.setDesc(get(Pred[0].getImm() ? (isPPC64 ? PPC::BDNZLR8 : PPC::BDNZLR)
                                      : (isPPC64 ? PPC::BDZLR8 : PPC::BDZLR)));

      MI.setDesc(get(PPC::BCLR));

      MI.setDesc(get(PPC::BCLRn));

      MI.setDesc(get(PPC::BCCLR));
          .addImm(Pred[0].getImm())
  } else if (OpC == PPC::B) {
    if (Pred[1].getReg() == PPC::CTR8 || Pred[1].getReg() == PPC::CTR) {
      bool isPPC64 = Subtarget.isPPC64();
      MI.setDesc(get(Pred[0].getImm() ? (isPPC64 ? PPC::BDNZ8 : PPC::BDNZ)
                                      : (isPPC64 ? PPC::BDZ8 : PPC::BDZ)));

      MI.removeOperand(0);

      MI.setDesc(get(PPC::BC));

      MI.removeOperand(0);

      MI.setDesc(get(PPC::BCn));

      MI.removeOperand(0);

      MI.setDesc(get(PPC::BCC));
          .addImm(Pred[0].getImm())
  } else if (OpC == PPC::BCTR || OpC == PPC::BCTR8 || OpC == PPC::BCTRL ||
             OpC == PPC::BCTRL8 || OpC == PPC::BCTRL_RM ||
             OpC == PPC::BCTRL8_RM) {
    if (Pred[1].getReg() == PPC::CTR8 || Pred[1].getReg() == PPC::CTR)

    bool setLR = OpC == PPC::BCTRL || OpC == PPC::BCTRL8 ||
                 OpC == PPC::BCTRL_RM || OpC == PPC::BCTRL8_RM;
    bool isPPC64 = Subtarget.isPPC64();

      MI.setDesc(get(isPPC64 ? (setLR ? PPC::BCCTRL8 : PPC::BCCTR8)
                             : (setLR ? PPC::BCCTRL : PPC::BCCTR)));

      MI.setDesc(get(isPPC64 ? (setLR ? PPC::BCCTRL8n : PPC::BCCTR8n)
                             : (setLR ? PPC::BCCTRLn : PPC::BCCTRn)));

      MI.setDesc(get(isPPC64 ? (setLR ? PPC::BCCCTRL8 : PPC::BCCCTR8)
                             : (setLR ? PPC::BCCCTRL : PPC::BCCCTR)));
        .addImm(Pred[0].getImm())

    if (OpC == PPC::BCTRL_RM || OpC == PPC::BCTRL8_RM)

  assert(Pred1.size() == 2 && "Invalid PPC first predicate");
  assert(Pred2.size() == 2 && "Invalid PPC second predicate");

  if (Pred1[1].getReg() == PPC::CTR8 || Pred1[1].getReg() == PPC::CTR)
  if (Pred2[1].getReg() == PPC::CTR8 || Pred2[1].getReg() == PPC::CTR)
                                  std::vector<MachineOperand> &Pred,
                                  bool SkipDead) const {

      { &PPC::CRRCRegClass, &PPC::CRBITRCRegClass,
        &PPC::CTRRCRegClass, &PPC::CTRRC8RegClass };

  for (unsigned c = 0; c < std::size(RCs) && !Found; ++c) {
      if (MO.isDef() && RC->contains(MO.getReg())) {
      } else if (MO.isRegMask()) {
          if (MO.clobbersPhysReg(R)) {
                                  int64_t &Value) const {
  unsigned Opc = MI.getOpcode();

  default:
    return false;

    SrcReg = MI.getOperand(1).getReg();
    Value = MI.getOperand(2).getImm();

    SrcReg = MI.getOperand(1).getReg();
    SrcReg2 = MI.getOperand(2).getReg();
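// optimizeCompareInstr tries to delete a comparison against zero (or fold a
// compare-with-immediate) by converting the instruction that feeds it into
// its record form, so that CR0 is set as a by-product. For rotates it may go
// further and turn RLWINM/RLDICL into record-form ANDI/ANDIS when the mask
// allows it (counted by NumRcRotatesConvertedToRcAnd), and it rewrites the
// predicates and CR subregisters of the users when operands had to be swapped.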
  if (OpC == PPC::FCMPUS || OpC == PPC::FCMPUD)

  bool isPPC64 = Subtarget.isPPC64();
  bool is32BitSignedCompare = OpC == PPC::CMPWI || OpC == PPC::CMPW;
  bool is32BitUnsignedCompare = OpC == PPC::CMPLWI || OpC == PPC::CMPLW;
  bool is64BitUnsignedCompare = OpC == PPC::CMPLDI || OpC == PPC::CMPLD;

  if (!MI)
    return false;

  bool equalityOnly = false;

  if (is32BitSignedCompare) {
  } else if (is32BitUnsignedCompare) {
    equalityOnly = true;
    equalityOnly = is64BitUnsignedCompare;
    equalityOnly = is32BitUnsignedCompare;

       I = MRI->use_instr_begin(CRReg), IE = MRI->use_instr_end();

      if (SubIdx != PPC::sub_eq)

    bool FoundUse = false;
         J = MRI->use_instr_begin(CRReg), JE = MRI->use_instr_end();

  else if (Value != 0) {

  if (equalityOnly || !MRI->hasOneUse(CRReg))

  int16_t Immed = (int16_t)Value;

  for (; I != E && !noSub; --I) {
    unsigned IOpC = Instr.getOpcode();

    if (&*I != &CmpInstr && (Instr.modifiesRegister(PPC::CR0, TRI) ||
                             Instr.readsRegister(PPC::CR0, TRI)))

    if ((OpC == PPC::CMPW || OpC == PPC::CMPLW ||
         OpC == PPC::CMPD || OpC == PPC::CMPLD) &&
        (IOpC == PPC::SUBF || IOpC == PPC::SUBF8) &&
        ((Instr.getOperand(1).getReg() == SrcReg &&
          Instr.getOperand(2).getReg() == SrcReg2) ||
         (Instr.getOperand(1).getReg() == SrcReg2 &&
          Instr.getOperand(2).getReg() == SrcReg))) {

  int MIOpC = MI->getOpcode();
  if (MIOpC == PPC::ANDI_rec || MIOpC == PPC::ANDI8_rec ||
      MIOpC == PPC::ANDIS_rec || MIOpC == PPC::ANDIS8_rec)
    NewOpC = PPC::getRecordFormOpcode(MIOpC);

  if (!equalityOnly && (NewOpC == PPC::SUBF_rec || NewOpC == PPC::SUBF8_rec) &&

  bool ShouldSwap = false;
  if (Sub && Value == 0) {
    ShouldSwap = !ShouldSwap;

       I = MRI->use_instr_begin(CRReg), IE = MRI->use_instr_end();
           "Invalid predicate for equality-only optimization");

    assert((!equalityOnly || NewSubReg == PPC::sub_eq) &&
           "Invalid CR bit for equality-only optimization");

    if (NewSubReg == PPC::sub_lt)
      NewSubReg = PPC::sub_gt;
    else if (NewSubReg == PPC::sub_gt)
      NewSubReg = PPC::sub_lt;

         "Non-zero immediate support and ShouldSwap "
         "may conflict in updating predicate");
    BuildMI(*MI->getParent(), std::next(MII), MI->getDebugLoc(),
            get(TargetOpcode::COPY), CRReg)

  MI->clearRegisterDeads(PPC::CR0);

  if (MIOpC != NewOpC) {

    if (MIOpC == PPC::RLWINM || MIOpC == PPC::RLWINM8) {
      Register GPRRes = MI->getOperand(0).getReg();
      int64_t SH = MI->getOperand(2).getImm();
      int64_t MB = MI->getOperand(3).getImm();
      int64_t ME = MI->getOperand(4).getImm();

      bool MBInLoHWord = MB >= 16;
      bool MEInLoHWord = ME >= 16;

      if (MB <= ME && MBInLoHWord == MEInLoHWord && SH == 0) {
        Mask = ((1LLU << (32 - MB)) - 1) & ~((1LLU << (31 - ME)) - 1);
        Mask >>= MBInLoHWord ? 0 : 16;
        NewOpC = MIOpC == PPC::RLWINM
                     ? (MBInLoHWord ? PPC::ANDI_rec : PPC::ANDIS_rec)
                     : (MBInLoHWord ? PPC::ANDI8_rec : PPC::ANDIS8_rec);
      } else if (MRI->use_empty(GPRRes) && (ME == 31) &&
                 (ME - MB + 1 == SH) && (MB >= 16)) {
        Mask = ((1LLU << 32) - 1) & ~((1LLU << (32 - SH)) - 1);
        NewOpC = MIOpC == PPC::RLWINM ? PPC::ANDIS_rec : PPC::ANDIS8_rec;

      if (Mask != ~0LLU) {
        MI->removeOperand(4);
        MI->removeOperand(3);
        MI->getOperand(2).setImm(Mask);
        NumRcRotatesConvertedToRcAnd++;
    } else if (MIOpC == PPC::RLDICL && MI->getOperand(2).getImm() == 0) {
      int64_t MB = MI->getOperand(3).getImm();
      uint64_t Mask = (1LLU << (63 - MB + 1)) - 1;
      NewOpC = PPC::ANDI8_rec;
      MI->removeOperand(3);
      MI->getOperand(2).setImm(Mask);
      NumRcRotatesConvertedToRcAnd++;

    MI->setDesc(NewDesc);

    if (!MI->definesRegister(ImpDef, nullptr)) {
      MI->addOperand(*MI->getParent()->getParent(),

    if (!MI->readsRegister(ImpUse, nullptr)) {
      MI->addOperand(*MI->getParent()->getParent(),

  assert(MI->definesRegister(PPC::CR0, nullptr) &&
         "Record-form instruction does not define cr0?");

  for (unsigned i = 0, e = PredsToUpdate.size(); i < e; i++)
    PredsToUpdate[i].first->setImm(PredsToUpdate[i].second);

  for (unsigned i = 0, e = SubRegsToUpdate.size(); i < e; i++)
    SubRegsToUpdate[i].first->setSubReg(SubRegsToUpdate[i].second);
  int64_t CmpMask, CmpValue;

  if (CmpValue || !CmpMask || SrcReg2)

  if (Opc == PPC::CMPLWI || Opc == PPC::CMPLDI)

  if (Subtarget.isPPC64() && Opc == PPC::CMPWI)

  bool SrcRegHasOtherUse = false;

  if (CRReg != PPC::CR0)

  bool SeenUseOfCRReg = false;
  bool IsCRRegKilled = false;
  if (!isRegElgibleForForwarding(RegMO, *SrcMI, CmpMI, false, IsCRRegKilled,

  int NewOpC = PPC::getRecordFormOpcode(SrcMIOpc);

         "Record-form instruction does not define cr0?");

  OffsetIsScalable = false;
  case PPC::DFSTOREf64:
    return FirstOpc == SecondOpc;

    return SecondOpc == PPC::STW || SecondOpc == PPC::STW8;

                                   int64_t OpOffset2, bool OffsetIsScalable2,
                                   unsigned ClusterSize,
                                   unsigned NumBytes) const {

         "Only base registers and frame indices are supported.");

  if (ClusterSize > 2)

  unsigned FirstOpc = FirstLdSt.getOpcode();
  unsigned SecondOpc = SecondLdSt.getOpcode();

  int64_t Offset1 = 0, Offset2 = 0;

  assert(Base1 == &BaseOp1 && Base2 == &BaseOp2 &&
         "getMemOperandWithOffsetWidth return incorrect base op");
  assert(Offset1 <= Offset2 && "Caller should have ordered offsets.");
  return Offset1 + (int64_t)Width1.getValue() == Offset2;
  unsigned Opcode = MI.getOpcode();

  if (Opcode == PPC::INLINEASM || Opcode == PPC::INLINEASM_BR) {
    const char *AsmStr = MI.getOperand(0).getSymbolName();
  } else if (Opcode == TargetOpcode::STACKMAP) {
  } else if (Opcode == TargetOpcode::PATCHPOINT) {

  return get(Opcode).getSize();
std::pair<unsigned, unsigned>

  return std::make_pair(TF, 0u);

  using namespace PPCII;
  static const std::pair<unsigned, const char *> TargetFlags[] = {
      {MO_PLT, "ppc-plt"},
      {MO_PIC_FLAG, "ppc-pic"},
      {MO_PCREL_FLAG, "ppc-pcrel"},
      {MO_GOT_FLAG, "ppc-got"},
      {MO_PCREL_OPT_FLAG, "ppc-opt-pcrel"},
      {MO_TLSGD_FLAG, "ppc-tlsgd"},
      {MO_TPREL_FLAG, "ppc-tprel"},
      {MO_TLSLDM_FLAG, "ppc-tlsldm"},
      {MO_TLSLD_FLAG, "ppc-tlsld"},
      {MO_TLSGDM_FLAG, "ppc-tlsgdm"},
      {MO_GOT_TLSGD_PCREL_FLAG, "ppc-got-tlsgd-pcrel"},
      {MO_GOT_TLSLD_PCREL_FLAG, "ppc-got-tlsld-pcrel"},
      {MO_GOT_TPREL_PCREL_FLAG, "ppc-got-tprel-pcrel"},
      {MO_TPREL_LO, "ppc-tprel-lo"},
      {MO_TPREL_HA, "ppc-tprel-ha"},
      {MO_DTPREL_LO, "ppc-dtprel-lo"},
      {MO_TLSLD_LO, "ppc-tlsld-lo"},
      {MO_TOC_LO, "ppc-toc-lo"},
      {MO_TLS, "ppc-tls"},
      {MO_PIC_HA_FLAG, "ppc-ha-pic"},
      {MO_PIC_LO_FLAG, "ppc-lo-pic"},
      {MO_TPREL_PCREL_FLAG, "ppc-tprel-pcrel"},
      {MO_TLS_PCREL_FLAG, "ppc-tls-pcrel"},
      {MO_GOT_PCREL_FLAG, "ppc-got-pcrel"},
  unsigned UpperOpcode, LowerOpcode;
  switch (MI.getOpcode()) {
  case PPC::DFLOADf32:
    UpperOpcode = PPC::LXSSP;
    LowerOpcode = PPC::LFS;
    break;
  case PPC::DFLOADf64:
    UpperOpcode = PPC::LXSD;
    LowerOpcode = PPC::LFD;
    break;
  case PPC::DFSTOREf32:
    UpperOpcode = PPC::STXSSP;
    LowerOpcode = PPC::STFS;
    break;
  case PPC::DFSTOREf64:
    UpperOpcode = PPC::STXSD;
    LowerOpcode = PPC::STFD;
    break;
  case PPC::XFLOADf32:
    UpperOpcode = PPC::LXSSPX;
    LowerOpcode = PPC::LFSX;
    break;
  case PPC::XFLOADf64:
    UpperOpcode = PPC::LXSDX;
    LowerOpcode = PPC::LFDX;
    break;
  case PPC::XFSTOREf32:
    UpperOpcode = PPC::STXSSPX;
    LowerOpcode = PPC::STFSX;
    break;
  case PPC::XFSTOREf64:
    UpperOpcode = PPC::STXSDX;
    LowerOpcode = PPC::STFDX;
    break;

    UpperOpcode = PPC::LXSIWAX;
    LowerOpcode = PPC::LFIWAX;

    UpperOpcode = PPC::LXSIWZX;
    LowerOpcode = PPC::LFIWZX;

    UpperOpcode = PPC::STXSIWX;
    LowerOpcode = PPC::STFIWX;

  Register TargetReg = MI.getOperand(0).getReg();
  if ((TargetReg >= PPC::F0 && TargetReg <= PPC::F31) ||
      (TargetReg >= PPC::VSL0 && TargetReg <= PPC::VSL31))
    Opcode = LowerOpcode;
  else
    Opcode = UpperOpcode;
  MI.setDesc(get(Opcode));
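// expandPostRAPseudo lowers the remaining PPC pseudos after register
// allocation: BUILD_UACC copies the four underlying VSL registers when the
// source and destination accumulators differ, KILL_PAIR becomes a no-op,
// LOAD_STACK_GUARD and PPCLdFixedAddr become loads relative to the thread
// pointer (X13/R2), the D-/X-form memory pseudos pick their final opcode via
// the helper above, the SPILLTOVSR_* pseudos choose a VSX or GPR opcode based
// on the register that was allocated, and CFENCE8 is lowered using a compare
// (CMPD/CMPW) followed by an ISYNC.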
  auto &MBB = *MI.getParent();
  auto DL = MI.getDebugLoc();

  switch (MI.getOpcode()) {
  case PPC::BUILD_UACC: {
    if (ACC - PPC::ACC0 != UACC - PPC::UACC0) {
      MCRegister SrcVSR = PPC::VSL0 + (UACC - PPC::UACC0) * 4;
      MCRegister DstVSR = PPC::VSL0 + (ACC - PPC::ACC0) * 4;
      for (int VecNo = 0; VecNo < 4; VecNo++)
            .addReg(SrcVSR + VecNo)

  case PPC::KILL_PAIR: {
    MI.setDesc(get(PPC::UNENCODED_NOP));
    MI.removeOperand(1);
    MI.removeOperand(0);

  case TargetOpcode::LOAD_STACK_GUARD: {
           "Only Linux target is expected to contain LOAD_STACK_GUARD");
    const int64_t Offset = Subtarget.isPPC64() ? -0x7010 : -0x7008;
    const unsigned Reg = Subtarget.isPPC64() ? PPC::X13 : PPC::R2;
    MI.setDesc(get(Subtarget.isPPC64() ? PPC::LD : PPC::LWZ));

  case PPC::PPCLdFixedAddr: {
           "Only targets with Glibc expected to contain PPCLdFixedAddr");
    const unsigned Reg = Subtarget.isPPC64() ? PPC::X13 : PPC::R2;
    MI.setDesc(get(PPC::LWZ));
#undef PPC_LNX_FEATURE
#define PPC_LNX_DEFINE_OFFSETS
#include "llvm/TargetParser/PPCTargetParser.def"
    bool Is64 = Subtarget.isPPC64();
    if (FAType == PPC_FAWORD_HWCAP) {
        Offset = Is64 ? PPC_HWCAP_OFFSET_LE64 : PPC_HWCAP_OFFSET_LE32;
        Offset = Is64 ? PPC_HWCAP_OFFSET_BE64 : PPC_HWCAP_OFFSET_BE32;
    } else if (FAType == PPC_FAWORD_HWCAP2) {
        Offset = Is64 ? PPC_HWCAP2_OFFSET_LE64 : PPC_HWCAP2_OFFSET_LE32;
        Offset = Is64 ? PPC_HWCAP2_OFFSET_BE64 : PPC_HWCAP2_OFFSET_BE32;
    } else if (FAType == PPC_FAWORD_CPUID) {
        Offset = Is64 ? PPC_CPUID_OFFSET_LE64 : PPC_CPUID_OFFSET_LE32;
        Offset = Is64 ? PPC_CPUID_OFFSET_BE64 : PPC_CPUID_OFFSET_BE32;
    assert(Offset && "Do not know the offset for this fixed addr load");
    MI.removeOperand(1);
#define PPC_TGT_PARSER_UNDEF_MACROS
#include "llvm/TargetParser/PPCTargetParser.def"
#undef PPC_TGT_PARSER_UNDEF_MACROS
  case PPC::DFLOADf32:
  case PPC::DFLOADf64:
  case PPC::DFSTOREf32:
  case PPC::DFSTOREf64: {
    assert(Subtarget.hasP9Vector() &&
           "Invalid D-Form Pseudo-ops on Pre-P9 target.");
           "D-form op must have register and immediate operands");

  case PPC::XFLOADf32:
  case PPC::XFSTOREf32:
    assert(Subtarget.hasP8Vector() &&
           "Invalid X-Form Pseudo-ops on Pre-P8 target.");
    assert(MI.getOperand(2).isReg() && MI.getOperand(1).isReg() &&
           "X-form op must have register and register operands");

  case PPC::XFLOADf64:
  case PPC::XFSTOREf64: {
    assert(Subtarget.hasVSX() &&
           "Invalid X-Form Pseudo-ops on target that has no VSX.");
    assert(MI.getOperand(2).isReg() && MI.getOperand(1).isReg() &&
           "X-form op must have register and register operands");

  case PPC::SPILLTOVSR_LD: {
    Register TargetReg = MI.getOperand(0).getReg();
    if (PPC::VSFRCRegClass.contains(TargetReg)) {
      MI.setDesc(get(PPC::DFLOADf64));
    } else {
      MI.setDesc(get(PPC::LD));

  case PPC::SPILLTOVSR_ST: {
    if (PPC::VSFRCRegClass.contains(SrcReg)) {
      NumStoreSPILLVSRRCAsVec++;
      MI.setDesc(get(PPC::DFSTOREf64));
    } else {
      NumStoreSPILLVSRRCAsGpr++;
      MI.setDesc(get(PPC::STD));

  case PPC::SPILLTOVSR_LDX: {
    Register TargetReg = MI.getOperand(0).getReg();
    if (PPC::VSFRCRegClass.contains(TargetReg))
      MI.setDesc(get(PPC::LXSDX));
    else
      MI.setDesc(get(PPC::LDX));

  case PPC::SPILLTOVSR_STX: {
    if (PPC::VSFRCRegClass.contains(SrcReg)) {
      NumStoreSPILLVSRRCAsVec++;
      MI.setDesc(get(PPC::STXSDX));
    } else {
      NumStoreSPILLVSRRCAsGpr++;
      MI.setDesc(get(PPC::STDX));

  case PPC::CFENCE8: {
    auto Val = MI.getOperand(0).getReg();
    unsigned CmpOp = Subtarget.isPPC64() ? PPC::CMPD : PPC::CMPW;
    MI.setDesc(get(PPC::ISYNC));
    MI.removeOperand(0);
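// selectReg evaluates an ISEL whose compared operands are both known
// constants: for signed (CMPWI/CMPDI) and unsigned (CMPLWI/CMPLDI) compares
// it returns TrueReg or FalseReg depending on which CR bit (lt/gt/eq) the
// ISEL consumes, and PPC::NoRegister when the outcome cannot be decided.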
static unsigned selectReg(int64_t Imm1, int64_t Imm2, unsigned CompareOpc,
                          unsigned TrueReg, unsigned FalseReg,
                          unsigned CRSubReg) {
  if (CompareOpc == PPC::CMPWI || CompareOpc == PPC::CMPDI) {
      return Imm1 < Imm2 ? TrueReg : FalseReg;
      return Imm1 > Imm2 ? TrueReg : FalseReg;
      return Imm1 == Imm2 ? TrueReg : FalseReg;
  } else if (CompareOpc == PPC::CMPLWI || CompareOpc == PPC::CMPLDI) {
      return Imm1 == Imm2 ? TrueReg : FalseReg;

  return PPC::NoRegister;

                                              int64_t Imm) const {
  assert(MI.getOperand(OpNo).isReg() && "Operand must be a REG");

  Register InUseReg = MI.getOperand(OpNo).getReg();
  MI.getOperand(OpNo).ChangeToImmediate(Imm);
  int UseOpIdx = MI.findRegisterUseOperandIdx(InUseReg, TRI, false);
  if (UseOpIdx >= 0) {
    MI.removeOperand(UseOpIdx);

  int OperandToKeep = LII.SetCR ? 1 : 0;
  for (int i = MI.getNumOperands() - 1; i > OperandToKeep; i--)
    MI.removeOperand(i);

    MI.setDesc(get(LII.Is64Bit ? PPC::ANDI8_rec : PPC::ANDI_rec));
                                bool &SeenIntermediateUse) const {
  assert(!MI.getParent()->getParent()->getRegInfo().isSSA() &&
         "Should be called after register allocation.");

  SeenIntermediateUse = false;
  for (; It != E; ++It) {
    if (It->modifiesRegister(Reg, TRI))
    if (It->readsRegister(Reg, TRI))
      SeenIntermediateUse = true;
                                        int64_t Imm) const {
         "Register should be in non-SSA form after RA");
  bool isPPC64 = Subtarget.isPPC64();

  if (isInt<16>(Imm)) {
  } else if (isInt<32>(Imm)) {

    assert(isPPC64 && "Materializing 64-bit immediate to single register is "
                      "only supported in PPC64");
    if ((Imm >> 32) & 0xFFFF)
          .addImm((Imm >> 32) & 0xFFFF);
        .addImm((Imm >> 16) & 0xFFFF);
                                                   unsigned &OpNoForForwarding,
                                                   bool &SeenIntermediateUse) const {
  OpNoForForwarding = ~0U;

    for (int i = 1, e = MI.getNumOperands(); i < e; i++) {
      if (!MI.getOperand(i).isReg())
      if (!Reg.isVirtual())
      if (DefMIForTrueReg->getOpcode() == PPC::LI ||
          DefMIForTrueReg->getOpcode() == PPC::LI8 ||
          DefMIForTrueReg->getOpcode() == PPC::ADDI ||
          DefMIForTrueReg->getOpcode() == PPC::ADDI8) {
        OpNoForForwarding = i;
        DefMI = DefMIForTrueReg;
    unsigned Opc = MI.getOpcode();
    bool ConvertibleImmForm =
        Opc == PPC::CMPWI || Opc == PPC::CMPLWI || Opc == PPC::CMPDI ||
        Opc == PPC::CMPLDI || Opc == PPC::ADDI || Opc == PPC::ADDI8 ||
        Opc == PPC::ORI || Opc == PPC::ORI8 || Opc == PPC::XORI ||
        Opc == PPC::XORI8 || Opc == PPC::RLDICL || Opc == PPC::RLDICL_rec ||
        Opc == PPC::RLDICL_32 || Opc == PPC::RLDICL_32_64 ||
        Opc == PPC::RLWINM || Opc == PPC::RLWINM_rec || Opc == PPC::RLWINM8 ||
        Opc == PPC::RLWINM8_rec;
    bool IsVFReg = (MI.getNumOperands() && MI.getOperand(0).isReg())

    if ((Opc == PPC::OR || Opc == PPC::OR8) &&
        MI.getOperand(1).getReg() == MI.getOperand(2).getReg())
    for (int i = 1, e = MI.getNumOperands(); i < e; i++) {
      SeenIntermediateUse = false;

        case PPC::ADDItocL8:
          OpNoForForwarding = i;

  return OpNoForForwarding == ~0U ? nullptr : DefMI;

unsigned PPCInstrInfo::getSpillTarget() const {
  bool IsP10Variant = Subtarget.isISA3_1() || Subtarget.pairedVectorMemops();
  return Subtarget.isISAFuture() ? 3 : IsP10Variant ?
         2 : Subtarget.hasP9Vector() ?
  bool PostRA = !MRI->isSSA();

  unsigned ToBeDeletedReg = 0;
  int64_t OffsetImm = 0;
  unsigned XFormOpcode = 0;

  bool OtherIntermediateUse = false;

  if (OtherIntermediateUse || !ADDMI)

  unsigned ScaleRegIdx = 0;
  int64_t OffsetAddi = 0;

  assert(ADDIMI && "There should be ADDIMI for valid ToBeChangedReg.");

  for (auto It = ++Start; It != End; It++)

      (ScaleReg == PPC::R0 || ScaleReg == PPC::X0))

  if (NewDefFor(ToBeChangedReg, *ADDMI, MI) || NewDefFor(ScaleReg, *ADDMI, MI))

  MI.setDesc(get(XFormOpcode));
      .ChangeToRegister(ScaleReg, false, false,
      .ChangeToRegister(ToBeChangedReg, false, false, true);
                                                  int64_t &Imm) const {
  if (Opc != PPC::ADDI && Opc != PPC::ADDI8)

  return Opc == PPC::ADD4 || Opc == PPC::ADD8;

                                                 unsigned &ToBeDeletedReg,
                                                 unsigned &XFormOpcode,

  if (!MI.mayLoadOrStore())

  unsigned Opc = MI.getOpcode();

  if (XFormOpcode == PPC::INSTRUCTION_LIST_END)

  if (!ImmOperand.isImm())

  assert(RegOperand.isReg() && "Instruction format is not right");

  if (!RegOperand.isKill())

  ToBeDeletedReg = RegOperand.getReg();
  OffsetImm = ImmOperand.getImm();

                                            int64_t &OffsetAddi,
                                            int64_t OffsetImm) const {

  bool OtherIntermediateUse = false;

  if (OtherIntermediateUse || !ADDIMI)

  if (isInt<16>(OffsetAddi + OffsetImm))
  bool PostRA = !MRI->isSSA();
  bool SeenIntermediateUse = true;
  unsigned ForwardingOperand = ~0U;
                                            SeenIntermediateUse);

  assert(ForwardingOperand < MI.getNumOperands() &&
         "The forwarding operand needs to be valid at this point");
  bool IsForwardingOperandKilled = MI.getOperand(ForwardingOperand).isKill();
  bool KillFwdDefMI = !SeenIntermediateUse && IsForwardingOperandKilled;
  if (KilledDef && KillFwdDefMI)

          PPC::INSTRUCTION_LIST_END &&
      transformToNewImmFormFedByAdd(MI, *DefMI, ForwardingOperand))

  bool IsVFReg = MI.getOperand(0).isReg()

      transformToImmFormFedByAdd(MI, III, ForwardingOperand, *DefMI,

      transformToImmFormFedByLI(MI, III, ForwardingOperand, *DefMI))

  if (!HasImmForm && simplifyToLI(MI, *DefMI, ForwardingOperand, KilledDef))
  Register FoldingReg = MI.getOperand(1).getReg();

  if (SrcMI->getOpcode() != PPC::RLWINM &&
      SrcMI->getOpcode() != PPC::RLWINM_rec &&

  assert((MI.getOperand(2).isImm() && MI.getOperand(3).isImm() &&
         "Invalid PPC::RLWINM Instruction!");

  assert((MEMI < 32 && MESrc < 32 && MBMI < 32 && MBSrc < 32) &&
         "Invalid PPC::RLWINM Instruction!");

  bool SrcMaskFull = (MBSrc - MESrc == 1) || (MBSrc == 0 && MESrc == 31);

  if ((MBMI > MEMI) && !SrcMaskFull)

  APInt RotatedSrcMask = MaskSrc.rotl(SHMI);
  APInt FinalMask = RotatedSrcMask & MaskMI;

  bool Simplified = false;

  if (FinalMask.isZero()) {
        (MI.getOpcode() == PPC::RLWINM8 || MI.getOpcode() == PPC::RLWINM8_rec);

    if (MI.getOpcode() == PPC::RLWINM || MI.getOpcode() == PPC::RLWINM8) {
      MI.removeOperand(4);
      MI.removeOperand(3);
      MI.removeOperand(2);
      MI.getOperand(1).ChangeToImmediate(0);
      MI.setDesc(get(Is64Bit ? PPC::LI8 : PPC::LI));
    } else {
      MI.removeOperand(4);
      MI.removeOperand(3);
      MI.getOperand(2).setImm(0);
      MI.setDesc(get(Is64Bit ? PPC::ANDI8_rec : PPC::ANDI_rec));

      MI.getOperand(1).setIsKill(true);

      MI.getOperand(1).setIsKill(false);

    uint16_t NewSH = (SHSrc + SHMI) % 32;
    MI.getOperand(2).setImm(NewSH);
    MI.getOperand(3).setImm(NewMB);
    MI.getOperand(4).setImm(NewME);

      MI.getOperand(1).setIsKill(true);

      MI.getOperand(1).setIsKill(false);

  if (Simplified & MRI->use_nodbg_empty(FoldingReg) &&
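// instrHasImmForm maps an instruction whose register operand is known to be
// an immediate onto its immediate form: arithmetic and compares map to their
// I-forms (ADDI, ADDIC, SUBFIC, CMPWI, ...), variable shifts and rotates map
// to RLWINM/RLDICL/RLDICR, and X-form loads/stores map to the corresponding
// D-form (update forms to their D-form update variants), filling in the III
// descriptor consumed by the transformTo*ImmForm* routines.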
  default:
    return false;

    III.ImmOpcode = Opc == PPC::ADD4 ? PPC::ADDI : PPC::ADDI8;

    III.ImmOpcode = Opc == PPC::ADDC ? PPC::ADDIC : PPC::ADDIC8;

    III.ImmOpcode = Opc == PPC::SUBFC ? PPC::SUBFIC : PPC::SUBFIC8;

    III.ImmOpcode = Opc == PPC::CMPW ? PPC::CMPWI : PPC::CMPDI;

    III.ImmOpcode = Opc == PPC::CMPLW ? PPC::CMPLWI : PPC::CMPLDI;

  case PPC::OR: III.ImmOpcode = PPC::ORI; break;
  case PPC::OR8: III.ImmOpcode = PPC::ORI8; break;
  case PPC::XOR: III.ImmOpcode = PPC::XORI; break;
  case PPC::XOR8: III.ImmOpcode = PPC::XORI8; break;

  case PPC::RLWNM_rec:
  case PPC::RLWNM8_rec:
    if (Opc == PPC::RLWNM || Opc == PPC::RLWNM8 || Opc == PPC::RLWNM_rec ||
        Opc == PPC::RLWNM8_rec)
    case PPC::RLWNM: III.ImmOpcode = PPC::RLWINM; break;
    case PPC::RLWNM8: III.ImmOpcode = PPC::RLWINM8; break;
    case PPC::RLWNM_rec:
    case PPC::RLWNM8_rec:
    case PPC::SLW: III.ImmOpcode = PPC::RLWINM; break;
    case PPC::SLW8: III.ImmOpcode = PPC::RLWINM8; break;
    case PPC::SRW: III.ImmOpcode = PPC::RLWINM; break;
    case PPC::SRW8: III.ImmOpcode = PPC::RLWINM8; break;

  case PPC::RLDCL_rec:
  case PPC::RLDCR_rec:
    if (Opc == PPC::RLDCL || Opc == PPC::RLDCL_rec || Opc == PPC::RLDCR ||
        Opc == PPC::RLDCR_rec)
    case PPC::RLDCL: III.ImmOpcode = PPC::RLDICL; break;
    case PPC::RLDCL_rec:
    case PPC::RLDCR: III.ImmOpcode = PPC::RLDICR; break;
    case PPC::RLDCR_rec:
    case PPC::SLD: III.ImmOpcode = PPC::RLDICR; break;
    case PPC::SRD: III.ImmOpcode = PPC::RLDICL; break;
  case PPC::LBZX: III.ImmOpcode = PPC::LBZ; break;
  case PPC::LBZX8: III.ImmOpcode = PPC::LBZ8; break;
  case PPC::LHZX: III.ImmOpcode = PPC::LHZ; break;
  case PPC::LHZX8: III.ImmOpcode = PPC::LHZ8; break;
  case PPC::LHAX: III.ImmOpcode = PPC::LHA; break;
  case PPC::LHAX8: III.ImmOpcode = PPC::LHA8; break;
  case PPC::LWZX: III.ImmOpcode = PPC::LWZ; break;
  case PPC::LWZX8: III.ImmOpcode = PPC::LWZ8; break;
  case PPC::LFSX: III.ImmOpcode = PPC::LFS; break;
  case PPC::LFDX: III.ImmOpcode = PPC::LFD; break;
  case PPC::STBX: III.ImmOpcode = PPC::STB; break;
  case PPC::STBX8: III.ImmOpcode = PPC::STB8; break;
  case PPC::STHX: III.ImmOpcode = PPC::STH; break;
  case PPC::STHX8: III.ImmOpcode = PPC::STH8; break;
  case PPC::STWX: III.ImmOpcode = PPC::STW; break;
  case PPC::STWX8: III.ImmOpcode = PPC::STW8; break;
  case PPC::STFSX: III.ImmOpcode = PPC::STFS; break;
  case PPC::STFDX: III.ImmOpcode = PPC::STFD; break;

  case PPC::LBZUX: III.ImmOpcode = PPC::LBZU; break;
  case PPC::LBZUX8: III.ImmOpcode = PPC::LBZU8; break;
  case PPC::LHZUX: III.ImmOpcode = PPC::LHZU; break;
  case PPC::LHZUX8: III.ImmOpcode = PPC::LHZU8; break;
  case PPC::LHAUX: III.ImmOpcode = PPC::LHAU; break;
  case PPC::LHAUX8: III.ImmOpcode = PPC::LHAU8; break;
  case PPC::LWZUX: III.ImmOpcode = PPC::LWZU; break;
  case PPC::LWZUX8: III.ImmOpcode = PPC::LWZU8; break;
  case PPC::LFSUX: III.ImmOpcode = PPC::LFSU; break;
  case PPC::LFDUX: III.ImmOpcode = PPC::LFDU; break;
  case PPC::STBUX: III.ImmOpcode = PPC::STBU; break;
  case PPC::STBUX8: III.ImmOpcode = PPC::STBU8; break;
  case PPC::STHUX: III.ImmOpcode = PPC::STHU; break;
  case PPC::STHUX8: III.ImmOpcode = PPC::STHU8; break;
  case PPC::STWUX: III.ImmOpcode = PPC::STWU; break;
  case PPC::STWUX8: III.ImmOpcode = PPC::STWU8; break;
  case PPC::STFSUX: III.ImmOpcode = PPC::STFSU; break;
  case PPC::STFDUX: III.ImmOpcode = PPC::STFDU; break;
  case PPC::XFLOADf32:
  case PPC::XFLOADf64:
  case PPC::XFSTOREf32:
  case PPC::XFSTOREf64:
    if (!Subtarget.hasP9Vector())

    case PPC::XFLOADf32:
    case PPC::XFLOADf64:
    case PPC::XFSTOREf32:
    case PPC::XFSTOREf64:
  assert(Op1 != Op2 && "Cannot swap operand with itself.");

  unsigned MaxOp = std::max(Op1, Op2);
  unsigned MinOp = std::min(Op1, Op2);
  MI.removeOperand(std::max(Op1, Op2));
  MI.removeOperand(std::min(Op1, Op2));

  if (MaxOp - MinOp == 1 && MI.getNumOperands() == MinOp) {
    MI.addOperand(MOp2);
    MI.addOperand(MOp1);

    unsigned TotalOps = MI.getNumOperands() + 2;
    for (unsigned i = MI.getNumOperands() - 1; i >= MinOp; i--) {
      MI.removeOperand(i);
    MI.addOperand(MOp2);
    for (unsigned i = MI.getNumOperands(); i < TotalOps; i++) {
      MI.addOperand(MOp1);
      MI.addOperand(MOps.back());
                                             unsigned OpNoForForwarding

  unsigned Opc = DefMI.getOpcode();
  if (Opc != PPC::ADDItocL8 && Opc != PPC::ADDI && Opc != PPC::ADDI8)

  if (Opc == PPC::ADDItocL8 && Subtarget.isAIX())

         "Add inst must have at least three operands");
  RegMO = &DefMI.getOperand(1);
  ImmMO = &DefMI.getOperand(2);

  if (!RegMO->isReg())

bool PPCInstrInfo::isRegElgibleForForwarding(
                                             bool &IsFwdFeederRegKilled,
                                             bool &SeenIntermediateUse) const {

  for (; It != E; ++It) {
      IsFwdFeederRegKilled = true;
      SeenIntermediateUse = true;
    if ((&*It) == &DefMI)

bool PPCInstrInfo::isImmElgibleForForwarding(const MachineOperand &ImmMO,
                                             int64_t BaseImm) const {
  if (DefMI.getOpcode() == PPC::ADDItocL8) {

  if (ImmMO.isImm()) {
    APInt ActualValue(64, ImmMO.getImm() + BaseImm, true);

    Imm = SignExtend64<16>(ImmMO.getImm() + BaseImm);
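// simplifyToLI handles the case where the forwarded operand is defined by
// LI/LI8: depending on the user it either folds the constant directly (ADDI
// becomes LI of the sum, SUBFIC of a known value becomes LI, rotates with a
// constant input become LI of the rotated and masked value), or, for a
// compare that only feeds ISELs, evaluates the compare with selectReg and
// replaces each ISEL with a COPY or LI (counted by CmpIselsConverted).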
                                  unsigned OpNoForForwarding,

  if ((DefMI.getOpcode() != PPC::LI && DefMI.getOpcode() != PPC::LI8) ||
      !DefMI.getOperand(1).isImm())

  int64_t Immediate = DefMI.getOperand(1).getImm();
  int64_t SExtImm = SignExtend64<16>(Immediate);

  bool ReplaceWithLI = false;
  bool Is64BitLI = false;

  unsigned Opc = MI.getOpcode();

    bool Changed = false;
    int64_t Comparand = MI.getOperand(2).getImm();
    int64_t SExtComparand = ((uint64_t)Comparand & ~0x7FFFuLL) != 0
                                ? (Comparand | 0xFFFFFFFFFFFF0000)

    for (auto &CompareUseMI : MRI->use_instructions(DefReg)) {
      unsigned UseOpc = CompareUseMI.getOpcode();
      if (UseOpc != PPC::ISEL && UseOpc != PPC::ISEL8)
      unsigned CRSubReg = CompareUseMI.getOperand(3).getSubReg();
      Register TrueReg = CompareUseMI.getOperand(1).getReg();
      Register FalseReg = CompareUseMI.getOperand(2).getReg();
      unsigned RegToCopy =
          selectReg(SExtImm, SExtComparand, Opc, TrueReg, FalseReg, CRSubReg);
      if (RegToCopy == PPC::NoRegister)

      if (RegToCopy == PPC::ZERO || RegToCopy == PPC::ZERO8) {
        CompareUseMI.setDesc(get(UseOpc == PPC::ISEL8 ? PPC::LI8 : PPC::LI));
        CompareUseMI.removeOperand(3);
        CompareUseMI.removeOperand(2);

                 dbgs() << "Found LI -> CMPI -> ISEL, replacing with a copy.\n");
      CompareUseMI.setDesc(get(PPC::COPY));
      CompareUseMI.removeOperand(3);
      CompareUseMI.removeOperand(RegToCopy == TrueReg ? 2 : 1);
      CmpIselsConverted++;

      MissedConvertibleImmediateInstrs++;
    int64_t Addend = MI.getOperand(2).getImm();
    if (isInt<16>(Addend + SExtImm)) {
      ReplaceWithLI = true;
      Is64BitLI = Opc == PPC::ADDI8;
      NewImm = Addend + SExtImm;

  case PPC::SUBFIC8: {
    if (MI.getNumOperands() > 3 && !MI.getOperand(3).isDead())
    int64_t Minuend = MI.getOperand(2).getImm();
    if (isInt<16>(Minuend - SExtImm)) {
      ReplaceWithLI = true;
      Is64BitLI = Opc == PPC::SUBFIC8;
      NewImm = Minuend - SExtImm;

  case PPC::RLDICL_rec:
  case PPC::RLDICL_32:
  case PPC::RLDICL_32_64: {
    int64_t SH = MI.getOperand(2).getImm();
    int64_t MB = MI.getOperand(3).getImm();
    APInt InVal((Opc == PPC::RLDICL || Opc == PPC::RLDICL_rec) ? 64 : 32,
    InVal = InVal.rotl(SH);

    if (isUInt<15>(InVal.getSExtValue()) ||
        (Opc == PPC::RLDICL_rec && isUInt<16>(InVal.getSExtValue()))) {
      ReplaceWithLI = true;
      Is64BitLI = Opc != PPC::RLDICL_32;
      NewImm = InVal.getSExtValue();
      SetCR = Opc == PPC::RLDICL_rec;

  case PPC::RLWINM_rec:
  case PPC::RLWINM8_rec: {
    int64_t SH = MI.getOperand(2).getImm();
    int64_t MB = MI.getOperand(3).getImm();
    int64_t ME = MI.getOperand(4).getImm();
    APInt InVal(32, SExtImm, true);
    InVal = InVal.rotl(SH);

    bool ValueFits = isUInt<15>(InVal.getSExtValue());
    ValueFits |= ((Opc == PPC::RLWINM_rec || Opc == PPC::RLWINM8_rec) &&
                  isUInt<16>(InVal.getSExtValue()));
      ReplaceWithLI = true;
      Is64BitLI = Opc == PPC::RLWINM8 || Opc == PPC::RLWINM8_rec;
      NewImm = InVal.getSExtValue();
      SetCR = Opc == PPC::RLWINM_rec || Opc == PPC::RLWINM8_rec;