#define DEBUG_TYPE "reginfo"

#define GET_REGINFO_TARGET_DESC
#include "PPCGenRegisterInfo.inc"

STATISTIC(InflateGPRC, "Number of gprc inputs for getLargestLegalClass");
STATISTIC(InflateGP8RC, "Number of g8rc inputs for getLargestLegalClass");
    cl::desc("Enable use of a base pointer for complex stack frames"));

    cl::desc("Force the use of a base pointer in every function"));

    cl::desc("Enable spills from gpr to vsr rather than stack"));

    cl::desc("Consider R1 caller preserved so stack saves of "
             "caller preserved registers can be LICM candidates"),

    cl::desc("Maximum search distance for definition of CR bit "

    cl::desc("Emit information about accumulator register spills "
                        TM.isPPC64() ? 0 : 1,
                        TM.isPPC64() ? 0 : 1),
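  // ImmToIdxMap maps each immediate-offset (D-form) load/store and ADDI to
  // its indexed (X-form) equivalent; eliminateFrameIndex consults it when a
  // frame-index offset does not fit the instruction's displacement field.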
  ImmToIdxMap[PPC::LD] = PPC::LDX;     ImmToIdxMap[PPC::STD] = PPC::STDX;
  ImmToIdxMap[PPC::LBZ] = PPC::LBZX;   ImmToIdxMap[PPC::STB] = PPC::STBX;
  ImmToIdxMap[PPC::LHZ] = PPC::LHZX;   ImmToIdxMap[PPC::LHA] = PPC::LHAX;
  ImmToIdxMap[PPC::LWZ] = PPC::LWZX;   ImmToIdxMap[PPC::LWA] = PPC::LWAX;
  ImmToIdxMap[PPC::LFS] = PPC::LFSX;   ImmToIdxMap[PPC::LFD] = PPC::LFDX;
  ImmToIdxMap[PPC::STH] = PPC::STHX;   ImmToIdxMap[PPC::STW] = PPC::STWX;
  ImmToIdxMap[PPC::STFS] = PPC::STFSX; ImmToIdxMap[PPC::STFD] = PPC::STFDX;
  ImmToIdxMap[PPC::ADDI] = PPC::ADD4;
  ImmToIdxMap[PPC::LWA_32] = PPC::LWAX_32;
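  // 64-bit (g8rc) forms.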
  ImmToIdxMap[PPC::LHA8] = PPC::LHAX8; ImmToIdxMap[PPC::LBZ8] = PPC::LBZX8;
  ImmToIdxMap[PPC::LHZ8] = PPC::LHZX8; ImmToIdxMap[PPC::LWZ8] = PPC::LWZX8;
  ImmToIdxMap[PPC::STB8] = PPC::STBX8; ImmToIdxMap[PPC::STH8] = PPC::STHX8;
  ImmToIdxMap[PPC::STW8] = PPC::STWX8; ImmToIdxMap[PPC::STDU] = PPC::STDUX;
  ImmToIdxMap[PPC::ADDI8] = PPC::ADD8;
  ImmToIdxMap[PPC::LQ] = PPC::LQX_PSEUDO;
  ImmToIdxMap[PPC::STQ] = PPC::STQX_PSEUDO;
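  // VSX loads/stores, including the GPR-to-VSR spill pseudos.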
  ImmToIdxMap[PPC::DFLOADf32] = PPC::LXSSPX;
  ImmToIdxMap[PPC::DFLOADf64] = PPC::LXSDX;
  ImmToIdxMap[PPC::SPILLTOVSR_LD] = PPC::SPILLTOVSR_LDX;
  ImmToIdxMap[PPC::SPILLTOVSR_ST] = PPC::SPILLTOVSR_STX;
  ImmToIdxMap[PPC::DFSTOREf32] = PPC::STXSSPX;
  ImmToIdxMap[PPC::DFSTOREf64] = PPC::STXSDX;
  ImmToIdxMap[PPC::LXV] = PPC::LXVX;
  ImmToIdxMap[PPC::LXSD] = PPC::LXSDX;
  ImmToIdxMap[PPC::LXSSP] = PPC::LXSSPX;
  ImmToIdxMap[PPC::STXV] = PPC::STXVX;
  ImmToIdxMap[PPC::STXSD] = PPC::STXSDX;
  ImmToIdxMap[PPC::STXSSP] = PPC::STXSSPX;
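  // SPE loads/stores.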
  ImmToIdxMap[PPC::EVLDD] = PPC::EVLDDX;
  ImmToIdxMap[PPC::EVSTDD] = PPC::EVSTDDX;
  ImmToIdxMap[PPC::SPESTW] = PPC::SPESTWX;
  ImmToIdxMap[PPC::SPELWZ] = PPC::SPELWZX;
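  // Power10 prefixed loads/stores (34-bit displacement). There are no
  // prefixed indexed forms, so these fall back to the plain X-forms.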
  ImmToIdxMap[PPC::PLBZ] = PPC::LBZX;   ImmToIdxMap[PPC::PLBZ8] = PPC::LBZX8;
  ImmToIdxMap[PPC::PLHZ] = PPC::LHZX;   ImmToIdxMap[PPC::PLHZ8] = PPC::LHZX8;
  ImmToIdxMap[PPC::PLHA] = PPC::LHAX;   ImmToIdxMap[PPC::PLHA8] = PPC::LHAX8;
  ImmToIdxMap[PPC::PLWZ] = PPC::LWZX;   ImmToIdxMap[PPC::PLWZ8] = PPC::LWZX8;
  ImmToIdxMap[PPC::PLWA] = PPC::LWAX;   ImmToIdxMap[PPC::PLWA8] = PPC::LWAX;
  ImmToIdxMap[PPC::PLD] = PPC::LDX;     ImmToIdxMap[PPC::PSTD] = PPC::STDX;

  ImmToIdxMap[PPC::PSTB] = PPC::STBX;   ImmToIdxMap[PPC::PSTB8] = PPC::STBX8;
  ImmToIdxMap[PPC::PSTH] = PPC::STHX;   ImmToIdxMap[PPC::PSTH8] = PPC::STHX8;
  ImmToIdxMap[PPC::PSTW] = PPC::STWX;   ImmToIdxMap[PPC::PSTW8] = PPC::STWX8;

  ImmToIdxMap[PPC::PLFS] = PPC::LFSX;   ImmToIdxMap[PPC::PSTFS] = PPC::STFSX;
  ImmToIdxMap[PPC::PLFD] = PPC::LFDX;   ImmToIdxMap[PPC::PSTFD] = PPC::STFDX;
  ImmToIdxMap[PPC::PLXSSP] = PPC::LXSSPX; ImmToIdxMap[PPC::PSTXSSP] = PPC::STXSSPX;
  ImmToIdxMap[PPC::PLXSD] = PPC::LXSDX; ImmToIdxMap[PPC::PSTXSD] = PPC::STXSDX;
  ImmToIdxMap[PPC::PLXV] = PPC::LXVX;   ImmToIdxMap[PPC::PSTXV] = PPC::STXVX;
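  // Paired vector (VSX register pair) loads/stores and their prefixed forms.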
  ImmToIdxMap[PPC::LXVP] = PPC::LXVPX;
  ImmToIdxMap[PPC::STXVP] = PPC::STXVPX;
  ImmToIdxMap[PPC::PLXVP] = PPC::LXVPX;
  ImmToIdxMap[PPC::PSTXVP] = PPC::STXVPX;

    return &PPC::G8RC_NOX0RegClass;
  return &PPC::GPRC_NOR0RegClass;

    return &PPC::G8RCRegClass;
  return &PPC::GPRCRegClass;

      return CSR_64_AllRegs_VSRP_SaveList;

        return CSR_64_AllRegs_AIX_Dflt_VSX_SaveList;
      return CSR_64_AllRegs_VSX_SaveList;

        return CSR_64_AllRegs_AIX_Dflt_Altivec_SaveList;
      return CSR_64_AllRegs_Altivec_SaveList;

    return CSR_64_AllRegs_SaveList;

      return SaveR2 ? CSR_SVR64_ColdCC_R2_VSRP_SaveList
                    : CSR_SVR64_ColdCC_VSRP_SaveList;
      return SaveR2 ? CSR_SVR64_ColdCC_R2_Altivec_SaveList
                    : CSR_SVR64_ColdCC_Altivec_SaveList;
    return SaveR2 ? CSR_SVR64_ColdCC_R2_SaveList
                  : CSR_SVR64_ColdCC_SaveList;

      return CSR_SVR32_ColdCC_VSRP_SaveList;
      return CSR_SVR32_ColdCC_Altivec_SaveList;
    else if (Subtarget.hasSPE())
      return CSR_SVR32_ColdCC_SPE_SaveList;
    return CSR_SVR32_ColdCC_SaveList;

    return SaveR2 ? CSR_SVR464_R2_VSRP_SaveList : CSR_SVR464_VSRP_SaveList;
    return SaveR2 ? CSR_PPC64_R2_Altivec_SaveList
                  : CSR_PPC64_Altivec_SaveList;
  return SaveR2 ? CSR_PPC64_R2_SaveList : CSR_PPC64_SaveList;

                                   : CSR_AIX32_SaveList;
  return CSR_AIX32_SaveList;

    return CSR_SVR432_VSRP_SaveList;
    return CSR_SVR432_Altivec_SaveList;
  else if (Subtarget.hasSPE())
    return CSR_SVR432_SPE_SaveList;
  return CSR_SVR432_SaveList;

      return CSR_64_AllRegs_VSRP_RegMask;

        return CSR_64_AllRegs_AIX_Dflt_VSX_RegMask;
      return CSR_64_AllRegs_VSX_RegMask;

        return CSR_64_AllRegs_AIX_Dflt_Altivec_RegMask;
      return CSR_64_AllRegs_Altivec_RegMask;

    return CSR_64_AllRegs_RegMask;

                      ? CSR_PPC64_Altivec_RegMask
                      ? CSR_AIX32_Altivec_RegMask
                      : CSR_AIX32_RegMask);

               ? CSR_SVR64_ColdCC_VSRP_RegMask
               : (Subtarget.hasAltivec() ? CSR_SVR64_ColdCC_Altivec_RegMask
                                         : CSR_SVR64_ColdCC_RegMask);

               ? CSR_SVR32_ColdCC_VSRP_RegMask
               ? CSR_SVR32_ColdCC_Altivec_RegMask
               : (Subtarget.hasSPE() ? CSR_SVR32_ColdCC_SPE_RegMask
                                     : CSR_SVR32_ColdCC_RegMask));

               ? CSR_SVR464_VSRP_RegMask
               : (Subtarget.hasAltivec() ? CSR_PPC64_Altivec_RegMask
                                         : CSR_PPC64_RegMask);

               ? CSR_SVR432_VSRP_RegMask
               ? CSR_SVR432_Altivec_RegMask
               : (Subtarget.hasSPE() ? CSR_SVR432_SPE_RegMask
                                     : CSR_SVR432_RegMask));

  return CSR_NoRegs_RegMask;
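  // Each bit of the mask corresponds to one register number, packed 32 per
  // 32-bit word; clear the bits for the non-allocatable pseudo registers
  // ZERO, ZERO8 and RM.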
  for (unsigned PseudoReg : {PPC::ZERO, PPC::ZERO8, PPC::RM})
    Mask[PseudoReg / 32] &= ~(1u << (PseudoReg % 32));

  markSuperRegs(Reserved, PPC::ZERO);

  markSuperRegs(Reserved, PPC::FP);

  markSuperRegs(Reserved, PPC::BP);

  markSuperRegs(Reserved, PPC::CTR);
  markSuperRegs(Reserved, PPC::CTR8);

  markSuperRegs(Reserved, PPC::R1);
  markSuperRegs(Reserved, PPC::LR);
  markSuperRegs(Reserved, PPC::LR8);
  markSuperRegs(Reserved, PPC::RM);

  markSuperRegs(Reserved, PPC::VRSAVE);

    markSuperRegs(Reserved, PPC::R2);
    markSuperRegs(Reserved, PPC::R13);

    markSuperRegs(Reserved, PPC::R2);

    markSuperRegs(Reserved, PPC::R13);

  if (TFI->needsFP(MF))
    markSuperRegs(Reserved, PPC::R31);

    markSuperRegs(Reserved, PPC::R29);

    markSuperRegs(Reserved, PPC::R30);

    markSuperRegs(Reserved, PPC::R30);

         IE = PPC::VRRCRegClass.end(); I != IE; ++I)
      markSuperRegs(Reserved, *I);

    for (auto Reg : CSR_Altivec_SaveList) {

      markSuperRegs(Reserved, Reg);

  assert(checkAllSuperRegsMarked(Reserved));

  return PhysReg != PPC::R1 && PhysReg != PPC::X1;
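  // A D-form memory access only carries a signed 16-bit displacement, so a
  // frame larger than 0x7FFF bytes cannot be reached directly from the
  // stack pointer.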
  if (FrameSize & ~0x7FFF) {
    LLVM_DEBUG(dbgs() << "TRUE - Frame size is too large for D-Form.\n");

  for (unsigned i = 0; i < Info.size(); i++) {
    if (Info[i].isSpilledToReg())

    int FrIdx = Info[i].getFrameIdx();

    unsigned Opcode = InstrInfo->getStoreOpcodeForSpill(RC);

      LLVM_DEBUG(dbgs() << "Memory Operand: " << InstrInfo->getName(Opcode)
                        << " for register " << printReg(Reg, this) << ".\n");
      LLVM_DEBUG(dbgs() << "TRUE - Not fixed frame object that requires "

    if (InstrInfo->isXFormMemOp(Opcode)) {
      LLVM_DEBUG(dbgs() << "Memory Operand: " << InstrInfo->getName(Opcode)
                        << " for register " << printReg(Reg, this) << ".\n");

    if ((Opcode == PPC::RESTORE_QUADWORD) || (Opcode == PPC::SPILL_QUADWORD)) {
      LLVM_DEBUG(dbgs() << "Memory Operand: " << InstrInfo->getName(Opcode)
                        << " for register " << printReg(Reg, this) << ".\n");

      !MFI.hasVarSizedObjects() && !MFI.hasOpaqueSPAdjustment())

      VirtReg, Order, Hints, MF, VRM, Matrix);

    switch (Use.getOpcode()) {
    case TargetOpcode::COPY: {
      ResultOp = &Use.getOperand(0);
      ResultReg = ResultOp->getReg();

      if (HintReg >= PPC::VSRp0 && HintReg <= PPC::VSRp31)
        Hints.push_back(HintReg);

    case PPC::BUILD_UACC: {
      ResultOp = &Use.getOperand(0);
      ResultReg = ResultOp->getReg();

      assert((ACCPhys >= PPC::ACC0 && ACCPhys <= PPC::ACC7) &&
             "Expecting an ACC register for BUILD_UACC.");
      Register HintReg = PPC::UACC0 + (ACCPhys - PPC::ACC0);
      Hints.push_back(HintReg);

  return BaseImplRetVal;

  const unsigned DefaultSafety = 1;
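  // Per-class pressure limits: the number of registers in the class, less a
  // small safety margin (DefaultSafety) and, for the GPR classes, the frame
  // pointer when one is required.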
  switch (RC->getID()) {
  case PPC::G8RC_NOX0RegClassID:
  case PPC::GPRC_NOR0RegClassID:
  case PPC::SPERCRegClassID:
  case PPC::G8RCRegClassID:
  case PPC::GPRCRegClassID: {
    unsigned FP = TFI->hasFP(MF) ? 1 : 0;
    return 32 - FP - DefaultSafety;

  case PPC::F4RCRegClassID:
  case PPC::F8RCRegClassID:
  case PPC::VSLRCRegClassID:
    return 32 - DefaultSafety;
  case PPC::VFRCRegClassID:
  case PPC::VRRCRegClassID: {
      return 20 - DefaultSafety;
    return 32 - DefaultSafety;
  case PPC::VSFRCRegClassID:
  case PPC::VSSRCRegClassID:
  case PPC::VSRCRegClassID: {
      return 52 - DefaultSafety;
    return 64 - DefaultSafety;
  case PPC::CRRCRegClassID:
    return 8 - DefaultSafety;

  const auto *DefaultSuperclass =

      RC == &PPC::G8RCRegClass) {
      return &PPC::SPILLTOVSRRCRegClass;

    if (getRegSizeInBits(**I) != getRegSizeInBits(*RC))

    switch ((*I)->getID()) {
    case PPC::VSSRCRegClassID:
      return Subtarget.hasP8Vector() ? *I : DefaultSuperclass;
    case PPC::VSFRCRegClassID:
    case PPC::VSRCRegClassID:
    case PPC::VSRpRCRegClassID:
    case PPC::ACCRCRegClassID:
    case PPC::UACCRCRegClassID:
      return Subtarget.hasMMA() ? *I : DefaultSuperclass;

  return DefaultSuperclass;
720 "Maximum call-frame size not sufficiently aligned");
726 bool KillNegSizeReg =
MI.getOperand(1).isKill();
727 Register NegSizeReg =
MI.getOperand(1).getReg();
739 .
addImm(maxCallFrameSize);
747 .
addImm(maxCallFrameSize);
759 bool &KillNegSizeReg,
792 if (MaxAlign < TargetAlign &&
isInt<16>(FrameSize)) {
812 if (MaxAlign > TargetAlign) {
813 unsigned UnalNegSizeReg = NegSizeReg;
821 unsigned NegSizeReg1 = NegSizeReg;
826 KillNegSizeReg =
true;
829 if (MaxAlign > TargetAlign) {
830 unsigned UnalNegSizeReg = NegSizeReg;
838 unsigned NegSizeReg1 = NegSizeReg;
843 KillNegSizeReg =
true;
861 Register FramePointer =
MI.getOperand(0).getReg();
862 const Register ActualNegSizeReg =
MI.getOperand(1).getReg();
863 bool KillNegSizeReg =
MI.getOperand(2).isKill();
864 Register NegSizeReg =
MI.getOperand(2).getReg();
867 if (FramePointer == NegSizeReg) {
868 assert(KillNegSizeReg &&
"FramePointer is a def and NegSizeReg is an use, "
869 "NegSizeReg should be killed");
873 BuildMI(
MBB, II, dl, CopyInst, ActualNegSizeReg)
876 NegSizeReg = ActualNegSizeReg;
877 KillNegSizeReg =
false;
882 if (NegSizeReg != ActualNegSizeReg)
883 BuildMI(
MBB, II, dl, CopyInst, ActualNegSizeReg)
907 MI.getOperand(0).getReg())
908 .
addImm(maxCallFrameSize);
945 if (SrcReg != PPC::CR0) {
952 .
addImm(getEncodingValue(SrcReg) * 4)
982 assert(
MI.definesRegister(DestReg) &&
983 "RESTORE_CR does not define its destination");
990 if (DestReg != PPC::CR0) {
994 unsigned ShiftBits = getEncodingValue(DestReg)*4;
1001 BuildMI(
MBB, II, dl,
TII.get(LP64 ? PPC::MTOCRF8 : PPC::MTOCRF), DestReg)
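  // Walk backwards from the spill looking for the instruction that last
  // defined the CR bit, counting only non-debug instructions against the
  // search distance (bounded by the "Maximum search distance for definition
  // of CR bit" option declared above).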
  unsigned CRBitSpillDistance = 0;
  bool SeenUse = false;
  for (; Ins != Rend; ++Ins) {
    if (Ins->modifiesRegister(SrcReg, TRI))

    if (Ins->readsRegister(SrcReg, TRI))

    if (!Ins->isDebugInstr())
      CRBitSpillDistance++;

  bool SpillsKnownBit = false;
  switch (Ins->getOpcode()) {
      SpillsKnownBit = true;

      SpillsKnownBit = true;

    if (SrcReg == PPC::CR0LT || SrcReg == PPC::CR1LT ||
        SrcReg == PPC::CR2LT || SrcReg == PPC::CR3LT ||
        SrcReg == PPC::CR4LT || SrcReg == PPC::CR5LT ||
        SrcReg == PPC::CR6LT || SrcReg == PPC::CR7LT) {

        .addImm(getEncodingValue(SrcReg))

  bool KillsCRBit = MI.killsRegister(SrcReg, TRI);

  if (SpillsKnownBit && KillsCRBit && !SeenUse) {
    Ins->setDesc(TII.get(PPC::UNENCODED_NOP));
    Ins->removeOperand(0);

  Register DestReg = MI.getOperand(0).getReg();
  assert(MI.definesRegister(DestReg) &&
         "RESTORE_CRBIT does not define its destination");

  BuildMI(MBB, II, dl, TII.get(TargetOpcode::IMPLICIT_DEF), DestReg);

  unsigned ShiftBits = getEncodingValue(DestReg);

  BuildMI(MBB, II, dl, TII.get(LP64 ? PPC::RLWIMI8 : PPC::RLWIMI), RegO)
      .addImm(ShiftBits ? 32 - ShiftBits : 0)

  BuildMI(MBB, II, dl, TII.get(LP64 ? PPC::MTOCRF8 : PPC::MTOCRF),

  std::string Dest = PPC::ACCRCRegClass.contains(DestReg) ? "acc" : "uacc";
  std::string Src = PPC::ACCRCRegClass.contains(SrcReg) ? "acc" : "uacc";
  dbgs() << "Emitting copy from " << Src << " to " << Dest << ":\n";

  dbgs() << "Emitting " << (IsPrimed ? "acc" : "uacc") << " register "
         << (IsRestore ? "restore" : "spill") << ":\n";

                          bool IsKilled, bool TwoPairs) {
  unsigned Offset = 0;

    Offset = IsLittleEndian ? 48 : 0;

    Offset = IsLittleEndian ? 16 : 0;
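  // Recover the first underlying register of the pair: VSRp0-VSRp15 overlay
  // the VSL (lower VSX) registers and VSRp16-VSRp31 overlay the Altivec V
  // registers, each pair spanning two consecutive registers.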
  Register Reg = (SrcReg > PPC::VSRp15) ? PPC::V0 + (SrcReg - PPC::VSRp16) * 2
                                        : PPC::VSL0 + (SrcReg - PPC::VSRp0) * 2;

    Offset += IsLittleEndian ? -16 : 16;

    Offset += IsLittleEndian ? -16 : 16;

    Offset += IsLittleEndian ? -16 : 16;

         "Expecting to do this only if paired vector stores are disabled.");

  bool IsKilled = MI.getOperand(0).isKill();

  bool IsKilled = MI.getOperand(0).isKill();
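  // An ACC/UACC accumulator overlaps four consecutive VSX registers, i.e.
  // two VSRp pairs; compute the first pair backing it.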
  bool IsPrimed = PPC::ACCRCRegClass.contains(SrcReg);
      PPC::VSRp0 + (SrcReg - (IsPrimed ? PPC::ACC0 : PPC::UACC0)) * 2;

  if (IsPrimed && !IsKilled)

  Register DestReg = MI.getOperand(0).getReg();
  assert(MI.definesRegister(DestReg) &&
         "RESTORE_ACC does not define its destination");

  bool IsPrimed = PPC::ACCRCRegClass.contains(DestReg);
      PPC::VSRp0 + (DestReg - (IsPrimed ? PPC::ACC0 : PPC::UACC0)) * 2;

  bool IsKilled = MI.getOperand(0).isKill();
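  // A G8p register is an even/odd pair of 64-bit GPRs; recover the even
  // (first) GPR of the pair.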
  Register Reg = PPC::X0 + (SrcReg - PPC::G8p0) * 2;

  Register DestReg = MI.getOperand(0).getReg();
  assert(MI.definesRegister(DestReg) &&
         "RESTORE_QUADWORD does not define its destination");

  Register Reg = PPC::X0 + (DestReg - PPC::G8p0) * 2;

                                     IsLittleEndian ? 8 : 0);

                                     IsLittleEndian ? 0 : 8);

  if (PPC::CR2 <= Reg && Reg <= PPC::CR4) {

  case PPC::DFLOADf32:
  case PPC::DFLOADf64:
  case PPC::DFSTOREf32:
  case PPC::DFSTOREf64:

  unsigned OpC = MI.getOpcode();
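  // The offset operand normally sits next to the frame-index operand, but
  // inline asm, STACKMAP and PATCHPOINT place it at a different position.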
                                   unsigned FIOperandNum) {
  unsigned OffsetOperandNo = (FIOperandNum == 2) ? 1 : 2;
  if (MI.isInlineAsm())
    OffsetOperandNo = FIOperandNum - 1;
           MI.getOpcode() == TargetOpcode::PATCHPOINT)
    OffsetOperandNo = FIOperandNum + 1;

  return OffsetOperandNo;

                                          int SPAdj, unsigned FIOperandNum,

  assert(SPAdj == 0 && "Unexpected");

  int FrameIndex = MI.getOperand(FIOperandNum).getIndex();

  unsigned OpC = MI.getOpcode();

      (OpC == PPC::PREPARE_PROBED_ALLOCA_64 ||
       OpC == PPC::PREPARE_PROBED_ALLOCA_32 ||
       OpC == PPC::PREPARE_PROBED_ALLOCA_NEGSIZE_SAME_REG_64 ||
       OpC == PPC::PREPARE_PROBED_ALLOCA_NEGSIZE_SAME_REG_32)) {
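  // Spill/restore pseudos for CR fields, CR bits, MMA accumulators and
  // quadword GPR pairs are expanded by dedicated lowering helpers rather
  // than being rewritten as ordinary frame accesses.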
  if (OpC == PPC::SPILL_CR) {
  } else if (OpC == PPC::RESTORE_CR) {
  } else if (OpC == PPC::SPILL_CRBIT) {
  } else if (OpC == PPC::RESTORE_CRBIT) {
  } else if (OpC == PPC::SPILL_ACC || OpC == PPC::SPILL_UACC) {
  } else if (OpC == PPC::RESTORE_ACC || OpC == PPC::RESTORE_UACC) {
  } else if (OpC == PPC::SPILL_QUADWORD) {
  } else if (OpC == PPC::RESTORE_QUADWORD) {

  MI.getOperand(FIOperandNum).ChangeToRegister(

      OpC != TargetOpcode::PATCHPOINT && !ImmToIdxMap.count(OpC);

  Offset += MI.getOperand(OffsetOperandNo).getImm();
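  // Paired vector loads/stores have a restricted displacement form; if the
  // adjusted offset no longer fits, switch to the prefixed PLXVP/PSTXVP
  // encodings (the guarding condition is not part of this fragment).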
  if ((OpC == PPC::LXVP || OpC == PPC::STXVP) &&

    unsigned NewOpc = OpC == PPC::LXVP ? PPC::PLXVP : PPC::PSTXVP;
    MI.setDesc(TII.get(NewOpc));

  assert(OpC != PPC::DBG_VALUE &&
         "This should be handled in a target-independent way");
  bool OffsetFitsMnemonic = (OpC == PPC::EVSTDD || OpC == PPC::EVLDD) ?

  if (TII.isPrefixed(MI.getOpcode()))
    OffsetFitsMnemonic = isInt<34>(Offset);
  if (!noImmForm && ((OffsetFitsMnemonic &&
                     OpC == TargetOpcode::PATCHPOINT)) {
    MI.getOperand(OffsetOperandNo).ChangeToImmediate(Offset);

  unsigned NewOpcode = 0u;

  TII.materializeImmPostRA(MBB, II, dl, SReg, Offset);

  unsigned OperandBase;

           "No indexed form of load or store available!");
    NewOpcode = ImmToIdxMap.find(OpC)->second;
    MI.setDesc(TII.get(NewOpcode));

    OperandBase = OffsetOperandNo;

  Register StackReg = MI.getOperand(FIOperandNum).getReg();
  MI.getOperand(OperandBase).ChangeToRegister(StackReg, false);
  MI.getOperand(OperandBase + 1).ChangeToRegister(SReg, false, false, true);
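  // LQ/STQ have no true indexed form. Once the full address has been formed
  // in a scratch register, the LQX/STQX pseudo is rewritten back to the
  // immediate form with a zero displacement (NewReg is assumed to hold that
  // address; its computation is not part of this fragment).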
  if (NewOpcode == PPC::LQX_PSEUDO || NewOpcode == PPC::STQX_PSEUDO) {
    assert(is64Bit && "Quadword loads/stores only supported in 64-bit mode");

    MI.setDesc(TII.get(NewOpcode == PPC::LQX_PSEUDO ? PPC::LQ : PPC::STQ));
    MI.getOperand(OperandBase + 1).ChangeToRegister(NewReg, false);
    MI.getOperand(OperandBase).ChangeToImmediate(0);

    return TFI->hasFP(MF) ? PPC::R31 : PPC::R1;

  return TFI->hasFP(MF) ? PPC::X31 : PPC::X1;

  return hasStackRealignment(MF);

  assert(Offset < 0 && "Local offset must be negative");

  unsigned OpC = MI->getOpcode();
  if (!ImmToIdxMap.count(OpC))

  if ((OpC == PPC::ADDI || OpC == PPC::ADDI8) &&
      MI->getOperand(2).getImm() == 0)

                                           int64_t Offset) const {
  unsigned ADDriOpc = TM.isPPC64() ? PPC::ADDI8 : PPC::ADDI;

    DL = Ins->getDebugLoc();

                                        int64_t Offset) const {
  unsigned FIOperandNum = 0;
  while (!MI.getOperand(FIOperandNum).isFI()) {
    assert(FIOperandNum < MI.getNumOperands() &&
           "Instr doesn't have FrameIndex operand!");

  MI.getOperand(FIOperandNum).ChangeToRegister(BaseReg, false);

  Offset += MI.getOperand(OffsetOperandNo).getImm();
  MI.getOperand(OffsetOperandNo).ChangeToImmediate(Offset);

      TII.getRegClass(MCID, FIOperandNum, this, MF));

                                         int64_t Offset) const {
  unsigned FIOperandNum = 0;
  while (!MI->getOperand(FIOperandNum).isFI()) {
    assert(FIOperandNum < MI->getNumOperands() &&
           "Instr doesn't have FrameIndex operand!");

  Offset += MI->getOperand(OffsetOperandNo).getImm();

  return MI->getOpcode() == PPC::DBG_VALUE ||
         MI->getOpcode() == TargetOpcode::PATCHPOINT ||