#define AARCH64_EXPAND_PSEUDO_NAME "AArch64 pseudo instruction expansion pass"
  bool expandCMP_SWAP(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
                      unsigned LdarOp, unsigned StlrOp, unsigned CmpOp,
                      unsigned ExtendImm, unsigned ZeroReg,
                      MachineBasicBlock::iterator &NextMBBI);
char AArch64ExpandPseudo::ID = 0;
  assert(MO.isReg() && MO.getReg());
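// The MOVi32imm / MOVi64imm pseudos are lowered by asking
// AArch64_IMM::expandMOVImm for a sequence of ORR/AND-immediate, MOVZ, MOVN
// and MOVK instructions that synthesize the constant. A rough, illustrative
// example for a 64-bit constant that needs two 16-bit chunks:
//
//   movz x0, #0x5678
//   movk x0, #0x1234, lsl #16
//
// The exact sequence depends on the immediate; the cases below only show how
// each instruction kind in that sequence is emitted.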
  if (DstReg == AArch64::XZR || DstReg == AArch64::WZR) {
    MI.eraseFromParent();

  bool LastItem = std::next(I) == E;

    case AArch64::ORRWri:
    case AArch64::ORRXri:
            .add(MI.getOperand(0))
            .addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
      bool DstIsDead = MI.getOperand(0).isDead();

    case AArch64::ANDXri:
            .add(MI.getOperand(0))
            .addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
      bool DstIsDead = MI.getOperand(0).isDead();

    case AArch64::MOVNWi:
    case AArch64::MOVNXi:
    case AArch64::MOVZWi:
    case AArch64::MOVZXi: {
      bool DstIsDead = MI.getOperand(0).isDead();

    case AArch64::MOVKWi:
    case AArch64::MOVKXi: {
      bool DstIsDead = MI.getOperand(0).isDead();

  MI.eraseFromParent();
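// expandCMP_SWAP lowers the CMP_SWAP_8/16/32/64 pseudos into a
// load-exclusive / store-exclusive loop. A rough sketch of the emitted
// control flow for the 32-bit case (register names are illustrative):
//
//   .LloadCmp:                       ; LoadCmpBB
//     mov    wStatus, #0
//     ldaxr  wDest, [xAddr]
//     cmp    wDest, wDesired
//     b.ne   .Ldone
//                                    ; StoreBB
//     stlxr  wStatus, wNew, [xAddr]
//     cbnz   wStatus, .LloadCmp
//   .Ldone:                          ; DoneBB
//
// For the 8- and 16-bit variants the compare uses a UXTB/UXTH extended form,
// which is why CmpOp and ExtendImm are passed in by the caller.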
bool AArch64ExpandPseudo::expandCMP_SWAP(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, unsigned LdarOp,
    unsigned StlrOp, unsigned CmpOp, unsigned ExtendImm, unsigned ZeroReg,
    MachineBasicBlock::iterator &NextMBBI) {
  Register StatusReg = MI.getOperand(1).getReg();
  bool StatusDead = MI.getOperand(1).isDead();
  assert(!MI.getOperand(2).isUndef() && "cannot handle undef");
  Register DesiredReg = MI.getOperand(3).getReg();

  MF->insert(++LoadCmpBB->getIterator(), StoreBB);
  MF->insert(++StoreBB->getIterator(), DoneBB);

  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::MOVZWi), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(CmpOp), ZeroReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::Bcc))
  LoadCmpBB->addSuccessor(DoneBB);
  LoadCmpBB->addSuccessor(StoreBB);

  BuildMI(StoreBB, MIMD, TII->get(StlrOp), StatusReg)
  BuildMI(StoreBB, MIMD, TII->get(AArch64::CBNZW))
  StoreBB->addSuccessor(LoadCmpBB);
  StoreBB->addSuccessor(DoneBB);

  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);

  MI.eraseFromParent();

  StoreBB->clearLiveIns();
  LoadCmpBB->clearLiveIns();
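// expandCMP_SWAP_128 lowers the 128-bit compare-and-swap pseudos into an
// LDXP/STXP loop; plain CMP_SWAP_128 and its _MONOTONIC/_ACQUIRE/_RELEASE
// variants only differ in whether the paired load and store carry
// acquire/release semantics. Rough sketch for the fully ordered variant
// (register names are illustrative):
//
//   .LloadCmp:                                 ; LoadCmpBB
//     ldaxp  xDestLo, xDestHi, [xAddr]
//     cmp    xDestLo, xDesiredLo
//     cset   wStatus, ne
//     cmp    xDestHi, xDesiredHi
//     cinc   wStatus, wStatus, ne
//     cbnz   wStatus, .Lfail
//                                              ; StoreBB
//     stlxp  wStatus, xNewLo, xNewHi, [xAddr]
//     cbnz   wStatus, .LloadCmp
//     b      .Ldone
//   .Lfail:                                    ; FailBB: store the old value
//     stlxp  wStatus, xDestLo, xDestHi, [xAddr]
//     cbnz   wStatus, .LloadCmp
//   .Ldone:                                    ; DoneBB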
bool AArch64ExpandPseudo::expandCMP_SWAP_128(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    MachineBasicBlock::iterator &NextMBBI) {
  Register StatusReg = MI.getOperand(2).getReg();
  bool StatusDead = MI.getOperand(2).isDead();
  assert(!MI.getOperand(3).isUndef() && "cannot handle undef");
  Register DesiredLoReg = MI.getOperand(4).getReg();
  Register DesiredHiReg = MI.getOperand(5).getReg();
  Register NewLoReg = MI.getOperand(6).getReg();
  Register NewHiReg = MI.getOperand(7).getReg();

  unsigned LdxpOp, StxpOp;

  switch (MI.getOpcode()) {
  case AArch64::CMP_SWAP_128_MONOTONIC:
    LdxpOp = AArch64::LDXPX;
    StxpOp = AArch64::STXPX;
    break;
  case AArch64::CMP_SWAP_128_RELEASE:
    LdxpOp = AArch64::LDXPX;
    StxpOp = AArch64::STLXPX;
    break;
  case AArch64::CMP_SWAP_128_ACQUIRE:
    LdxpOp = AArch64::LDAXPX;
    StxpOp = AArch64::STXPX;
    break;
  case AArch64::CMP_SWAP_128:
    LdxpOp = AArch64::LDAXPX;
    StxpOp = AArch64::STLXPX;
    break;

  MF->insert(++LoadCmpBB->getIterator(), StoreBB);
  MF->insert(++StoreBB->getIterator(), FailBB);
  MF->insert(++FailBB->getIterator(), DoneBB);

  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::SUBSXrs), AArch64::XZR)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CSINCWr), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::SUBSXrs), AArch64::XZR)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CSINCWr), StatusReg)
  BuildMI(LoadCmpBB, MIMD, TII->get(AArch64::CBNZW))
  LoadCmpBB->addSuccessor(FailBB);
  LoadCmpBB->addSuccessor(StoreBB);

  BuildMI(StoreBB, MIMD, TII->get(StxpOp), StatusReg)
  BuildMI(StoreBB, MIMD, TII->get(AArch64::CBNZW))
  StoreBB->addSuccessor(LoadCmpBB);
  StoreBB->addSuccessor(DoneBB);

  BuildMI(FailBB, MIMD, TII->get(StxpOp), StatusReg)
  BuildMI(FailBB, MIMD, TII->get(AArch64::CBNZW))
  FailBB->addSuccessor(LoadCmpBB);
  FailBB->addSuccessor(DoneBB);

  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);

  MI.eraseFromParent();

  FailBB->clearLiveIns();
  StoreBB->clearLiveIns();
  LoadCmpBB->clearLiveIns();
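// expand_DestructiveOp maps SVE pseudos onto real instructions that destroy
// one of their source registers. The operand-index tuples below pick out the
// predicate, the destructive operand and the remaining sources for each
// destructive-instruction class (including the "WithRev" forms, where the
// reversed opcode is used when the destination happens to alias the other
// source). When the destination register does not already equal the
// destructive operand, a MOVPRFX is inserted so the destructive encoding can
// still be used; a rough, illustrative example for a predicated add:
//
//   movprfx z0, z1
//   add     z0.s, p0/m, z0.s, z2.s
//
// Zeroing pseudos use the predicated MOVPRFX_ZPzZ_* form instead, with an
// extra LSL #0 in some cases to zero the inactive lanes, which is why the
// element-size switch below also selects an LSL_ZPmI_* opcode.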
bool AArch64ExpandPseudo::expand_DestructiveOp(
    MachineInstr &MI, MachineBasicBlock &MBB,
    MachineBasicBlock::iterator MBBI) {
  bool DstIsDead = MI.getOperand(0).isDead();

  unsigned PredIdx, DOPIdx, SrcIdx, Src2Idx;

  if (DstReg == MI.getOperand(3).getReg()) {
    std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 3, 2);
  std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 2, 3);
  std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(2, 3, 3);
  std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 2, 3, 4);
  if (DstReg == MI.getOperand(3).getReg()) {
    std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 3, 4, 2);
  } else if (DstReg == MI.getOperand(4).getReg()) {
    std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 4, 3, 2);

  bool DOPRegIsUnique = false;
  DOPRegIsUnique = DstReg != MI.getOperand(SrcIdx).getReg();
  DOPRegIsUnique =
      DstReg != MI.getOperand(DOPIdx).getReg() ||
      MI.getOperand(DOPIdx).getReg() != MI.getOperand(SrcIdx).getReg();
  DOPRegIsUnique = true;
  DOPRegIsUnique =
      DstReg != MI.getOperand(DOPIdx).getReg() ||
      (MI.getOperand(DOPIdx).getReg() != MI.getOperand(SrcIdx).getReg() &&
       MI.getOperand(DOPIdx).getReg() != MI.getOperand(Src2Idx).getReg());

  uint64_t ElementSize = TII->getElementSizeForOpcode(Opcode);
  unsigned MovPrfx, LSLZero, MovPrfxZero;
  switch (ElementSize) {
  case AArch64::ElementSizeB:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_B;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_B;
    break;
  case AArch64::ElementSizeH:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_H;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_H;
    break;
  case AArch64::ElementSizeS:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_S;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_S;
    break;
  case AArch64::ElementSizeD:
    MovPrfx = AArch64::MOVPRFX_ZZ;
    LSLZero = AArch64::LSL_ZPmI_D;
    MovPrfxZero = AArch64::MOVPRFX_ZPzZ_D;
    break;

         "The destructive operand should be unique");
         "This instruction is unpredicated");

        .addReg(MI.getOperand(PredIdx).getReg())
        .addReg(MI.getOperand(DOPIdx).getReg());

        .add(MI.getOperand(PredIdx))
  } else if (DstReg != MI.getOperand(DOPIdx).getReg()) {
    assert(DOPRegIsUnique && "The destructive operand should be unique");
        .addReg(MI.getOperand(DOPIdx).getReg());

      .add(MI.getOperand(PredIdx))
      .add(MI.getOperand(SrcIdx));

    DOP.add(MI.getOperand(PredIdx))
        .add(MI.getOperand(SrcIdx));
    DOP.add(MI.getOperand(PredIdx))
        .add(MI.getOperand(SrcIdx))
        .add(MI.getOperand(Src2Idx));

    transferImpOps(MI, PRFX, DOP);
    transferImpOps(MI, DOP, DOP);

  MI.eraseFromParent();
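// expandSetTagLoop lowers STGloop_wback / STZGloop_wback into a loop of
// post-indexed MTE tag stores. If the total size is not a multiple of 32
// bytes, a single STG/STZG first handles one 16-byte granule; the rest is
// tagged two granules at a time with ST2G/STZ2G. A rough sketch of the loop
// for the zeroing variant (register names are illustrative):
//
//   .Lloop:                          ; LoopBB
//     stz2g  xAddr, [xAddr], #32
//     subs   xSize, xSize, #32
//     cbnz   xSize, .Lloop
//                                    ; DoneBB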
bool AArch64ExpandPseudo::expandSetTagLoop(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI,
    MachineBasicBlock::iterator &NextMBBI) {
  Register AddressReg = MI.getOperand(1).getReg();

  bool ZeroData = MI.getOpcode() == AArch64::STZGloop_wback;
  const unsigned OpCode1 =
      ZeroData ? AArch64::STZGPostIndex : AArch64::STGPostIndex;
  const unsigned OpCode2 =
      ZeroData ? AArch64::STZ2GPostIndex : AArch64::ST2GPostIndex;

  unsigned Size = MI.getOperand(2).getImm();
  if (Size % (16 * 2) != 0) {

  MF->insert(++LoopBB->getIterator(), DoneBB);

  LoopBB->addSuccessor(LoopBB);
  LoopBB->addSuccessor(DoneBB);

  DoneBB->splice(DoneBB->end(), &MBB, MI, MBB.end());
  DoneBB->transferSuccessors(&MBB);

  MI.eraseFromParent();

  LoopBB->clearLiveIns();
  DoneBB->clearLiveIns();
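// expandSVESpillFill lowers the multi-vector SVE spill/fill pseudos
// (STR_ZZXI .. STR_ZZZZXI and LDR_ZZXI .. LDR_ZZZZXI) into N consecutive
// STR_ZXI / LDR_ZXI instructions, one per zsub0..zsub(N-1) sub-register, with
// the immediate offset bumped by one "mul vl" slot each time. Illustrative
// result for a two-register store at offset 0:
//
//   str z0, [x0, #0, mul vl]
//   str z1, [x0, #1, mul vl]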
bool AArch64ExpandPseudo::expandSVESpillFill(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MBBI,
                                             unsigned Opc, unsigned N) {
    int ImmOffset = MI.getOperand(2).getImm() + Offset;
    bool Kill = (Offset + 1 == N) ? MI.getOperand(1).isKill() : false;
    assert(ImmOffset >= -256 && ImmOffset < 256 &&
           "Immediate spill offset out of range");
        TRI->getSubReg(MI.getOperand(0).getReg(), AArch64::zsub0 + Offset),
  MI.eraseFromParent();
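// expandCALL_RVMARKER splits BLR_RVMARKER into the real call (BL for a direct
// global callee, BLR otherwise), re-attaching the argument registers and the
// regmask as implicit operands, followed by a call to the attached
// ("rvmarker") runtime function, and bundles the result so nothing can be
// scheduled between the two. expandCALL_BTI does the analogous thing for
// BLR_BTI, bundling the call with the BTI instruction that follows it.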
bool AArch64ExpandPseudo::expandCALL_RVMARKER(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI) {
         "invalid operand for regular call");
  assert(RVTarget.isGlobal() && "invalid operand for attached call");
  unsigned Opc = CallTarget.isGlobal() ? AArch64::BL : AArch64::BLR;

  unsigned RegMaskStartIdx = 2;
  while (!MI.getOperand(RegMaskStartIdx).isRegMask()) {
    auto MOP = MI.getOperand(RegMaskStartIdx);
    assert(MOP.isReg() && "can only add register operands");
        MOP.getReg(), /*isDef=*/false, /*isImp=*/true));

  if (MI.shouldUpdateCallSiteInfo())

  MI.eraseFromParent();
                 std::next(RVCall->getIterator()));

bool AArch64ExpandPseudo::expandCALL_BTI(MachineBasicBlock &MBB,
                                         MachineBasicBlock::iterator MBBI) {
         "invalid operand for regular call");
  unsigned Opc = CallTarget.isGlobal() ? AArch64::BL : AArch64::BLR;
  Call->addOperand(CallTarget);

  if (MI.shouldUpdateCallSiteInfo())

  MI.eraseFromParent();
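// expandStoreSwiftAsyncContext stores the Swift async context register into
// its reserved slot at [base, #offset]. On targets other than arm64e this is
// a plain store; on arm64e the context pointer is expected to be signed
// (address-discriminated) before the store, so the expansion is longer and
// uses the ADDXri/SUBXri selected below to materialize the possibly negative
// offset.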
bool AArch64ExpandPseudo::expandStoreSwiftAsyncContext(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI) {
  if (STI.getTargetTriple().getArchName() != "arm64e") {

  unsigned Opc = Offset >= 0 ? AArch64::ADDXri : AArch64::SUBXri;
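// expandRestoreZA handles RestoreZAPseudo, which conditionally restores the
// SME ZA state after a call. The block is split around the pseudo and a
// conditional branch on the saved TPIDR2 value decides whether the ZA-restore
// routine is actually called; the loop over operands below simply forwards
// the pseudo's remaining call operands to the emitted call.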
MachineBasicBlock *
AArch64ExpandPseudo::expandRestoreZA(MachineBasicBlock &MBB,
                                     MachineBasicBlock::iterator MBBI) {
          MI.getParent()->successors().begin() !=
              MI.getParent()->successors().end()) &&
         "Unexpected unreachable in block that restores ZA");

      .add(MI.getOperand(0));

  for (unsigned I = 2; I < MI.getNumOperands(); ++I)
    MIB.add(MI.getOperand(I));

  MI.eraseFromParent();
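// expandCondSMToggle handles MSRpstatePseudo, the conditional streaming-mode
// change emitted around calls. If the pseudo ends an unreachable block it is
// simply deleted; otherwise a TBZX/TBNZX on the PStateSM value (the polarity
// depends on whether the callee is streaming) branches around an
// MSRpstatesvcrImm1 so the SMSTART/SMSTOP only executes when the caller's and
// callee's streaming modes actually differ.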
MachineBasicBlock *
AArch64ExpandPseudo::expandCondSMToggle(MachineBasicBlock &MBB,
                                        MachineBasicBlock::iterator MBBI) {
      MI.getParent()->successors().begin() ==
          MI.getParent()->successors().end()) {
    MI.eraseFromParent();

  auto PStateSM = MI.getOperand(2).getReg();
  bool IsStreamingCallee = MI.getOperand(3).getImm();
  unsigned Opc = IsStreamingCallee ? AArch64::TBZX : AArch64::TBNZX;

                     TII->get(AArch64::MSRpstatesvcrImm1));
  MIB.add(MI.getOperand(0));
  MIB.add(MI.getOperand(1));
  for (unsigned i = 4; i < MI.getNumOperands(); ++i)
    MIB.add(MI.getOperand(i));

  MI.eraseFromParent();
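// expandMI is the per-instruction driver. Opcodes that map onto an SVE
// destructive-operand pseudo are handed to expand_DestructiveOp; everything
// else falls through to the switch below, which expands the remaining
// pseudos in place and reports whether anything was changed.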
bool AArch64ExpandPseudo::expandMI(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator MBBI,
                                   MachineBasicBlock::iterator &NextMBBI) {
  unsigned Opcode = MI.getOpcode();

  if (OrigInstr != -1) {
    auto &Orig = TII->get(OrigInstr);
      return expand_DestructiveOp(MI, MBB, MBBI);
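// BSPv8i8 / BSPv16i8 is the pseudo for a NEON bitwise select. It is lowered
// to whichever real instruction lets the destination reuse an existing
// source register: BIT or BIF when the destination aliases one of the two
// data operands, BSL when it aliases the mask, and otherwise an ORR copy of
// the mask into the destination followed by a BSL.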
  case AArch64::BSPv8i8:
  case AArch64::BSPv16i8: {
    if (DstReg == MI.getOperand(3).getReg()) {
              TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BITv8i8
                                                  : AArch64::BITv16i8))
          .add(MI.getOperand(0))
          .add(MI.getOperand(3))
          .add(MI.getOperand(2))
          .add(MI.getOperand(1));
    } else if (DstReg == MI.getOperand(2).getReg()) {
              TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BIFv8i8
                                                  : AArch64::BIFv16i8))
          .add(MI.getOperand(0))
          .add(MI.getOperand(2))
          .add(MI.getOperand(3))
          .add(MI.getOperand(1));
      if (DstReg == MI.getOperand(1).getReg()) {
                TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
                                                    : AArch64::BSLv16i8))
            .add(MI.getOperand(0))
            .add(MI.getOperand(1))
            .add(MI.getOperand(2))
            .add(MI.getOperand(3));
                TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::ORRv8i8
                                                    : AArch64::ORRv16i8))
            .add(MI.getOperand(1))
            .add(MI.getOperand(1));
                TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
                                                    : AArch64::BSLv16i8))
            .add(MI.getOperand(0))
            .add(MI.getOperand(2))
            .add(MI.getOperand(3));
    MI.eraseFromParent();
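// Each register-register ALU opcode listed below is rewritten to its
// shifted-register ("rs") counterpart, which amounts to the same operation
// with an LSL #0, while carrying over PC-sections metadata, implicit
// operands, and any debug instruction number attached to the original
// instruction.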
  case AArch64::ADDWrr:
  case AArch64::SUBWrr:
  case AArch64::ADDXrr:
  case AArch64::SUBXrr:
  case AArch64::ADDSWrr:
  case AArch64::SUBSWrr:
  case AArch64::ADDSXrr:
  case AArch64::SUBSXrr:
  case AArch64::ANDWrr:
  case AArch64::ANDXrr:
  case AArch64::BICWrr:
  case AArch64::BICXrr:
  case AArch64::ANDSWrr:
  case AArch64::ANDSXrr:
  case AArch64::BICSWrr:
  case AArch64::BICSXrr:
  case AArch64::EONWrr:
  case AArch64::EONXrr:
  case AArch64::EORWrr:
  case AArch64::EORXrr:
  case AArch64::ORNWrr:
  case AArch64::ORNXrr:
  case AArch64::ORRWrr:
  case AArch64::ORRXrr: {
    switch (MI.getOpcode()) {
    case AArch64::ADDWrr:  Opcode = AArch64::ADDWrs;  break;
    case AArch64::SUBWrr:  Opcode = AArch64::SUBWrs;  break;
    case AArch64::ADDXrr:  Opcode = AArch64::ADDXrs;  break;
    case AArch64::SUBXrr:  Opcode = AArch64::SUBXrs;  break;
    case AArch64::ADDSWrr: Opcode = AArch64::ADDSWrs; break;
    case AArch64::SUBSWrr: Opcode = AArch64::SUBSWrs; break;
    case AArch64::ADDSXrr: Opcode = AArch64::ADDSXrs; break;
    case AArch64::SUBSXrr: Opcode = AArch64::SUBSXrs; break;
    case AArch64::ANDWrr:  Opcode = AArch64::ANDWrs;  break;
    case AArch64::ANDXrr:  Opcode = AArch64::ANDXrs;  break;
    case AArch64::BICWrr:  Opcode = AArch64::BICWrs;  break;
    case AArch64::BICXrr:  Opcode = AArch64::BICXrs;  break;
    case AArch64::ANDSWrr: Opcode = AArch64::ANDSWrs; break;
    case AArch64::ANDSXrr: Opcode = AArch64::ANDSXrs; break;
    case AArch64::BICSWrr: Opcode = AArch64::BICSWrs; break;
    case AArch64::BICSXrr: Opcode = AArch64::BICSXrs; break;
    case AArch64::EONWrr:  Opcode = AArch64::EONWrs;  break;
    case AArch64::EONXrr:  Opcode = AArch64::EONXrs;  break;
    case AArch64::EORWrr:  Opcode = AArch64::EORWrs;  break;
    case AArch64::EORXrr:  Opcode = AArch64::EORXrs;  break;
    case AArch64::ORNWrr:  Opcode = AArch64::ORNWrs;  break;
    case AArch64::ORNXrr:  Opcode = AArch64::ORNXrs;  break;
    case AArch64::ORRWrr:  Opcode = AArch64::ORRWrs;  break;
    case AArch64::ORRXrr:  Opcode = AArch64::ORRXrs;  break;

        TII->get(Opcode), MI.getDebugLoc(), true);
    MIB1->setPCSections(MF, MI.getPCSections());
        .add(MI.getOperand(1))
        .add(MI.getOperand(2))
    transferImpOps(MI, MIB1, MIB1);
    if (auto DebugNumber = MI.peekDebugInstrNum())
    MI.eraseFromParent();
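// LOADgot materializes a global's address through the GOT. Under the tiny
// code model this is a single literal load (LDRXl of the GOT entry);
// otherwise it becomes an ADRP of the GOT page followed by a load of the
// :got_lo12: slot, using the 32-bit sub-register (sub_32) of the destination
// on ILP32 targets. The operand-kind assert covers globals, external symbols
// and constant-pool entries.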
  case AArch64::LOADgot: {
                                        TII->get(AArch64::LDRXl), DstReg);
             "Only expect globals, externalsymbols, or constant pools");

      unsigned Reg32 = TRI->getSubReg(DstReg, AArch64::sub_32);
      unsigned DstFlags = MI.getOperand(0).getTargetFlags();
          .add(MI.getOperand(0))
             "Only expect globals, externalsymbols, or constant pools");

    transferImpOps(MI, MIB1, MIB2);
    MI.eraseFromParent();
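// MOVaddrBA and the MOVaddr* family materialize symbol addresses as an ADRP
// page plus a page-offset part. MOVaddrBA (blockaddress, MachO) goes through
// a constant-pool slot that is then loaded with LDRXui; MOVaddr, MOVaddrJT,
// MOVaddrCP, MOVaddrTLS and MOVaddrEXT become ADRP + ADDXri with a :lo12:
// reference. When the operand is MO_TAGGED, an extra MOVK (offset
// 0x100000000, granule 3) fills bits 48-63 of the result with the expected
// memory tag.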
  case AArch64::MOVaddrBA: {
      assert(MI.getOperand(1).getOffset() == 0 && "unexpected offset");
                          TII->get(AArch64::LDRXui), DstReg)
      transferImpOps(MI, MIB1, MIB2);
      MI.eraseFromParent();

  case AArch64::MOVaddr:
  case AArch64::MOVaddrJT:
  case AArch64::MOVaddrCP:
  case AArch64::MOVaddrTLS:
  case AArch64::MOVaddrEXT: {
    assert(DstReg != AArch64::XZR);
            .add(MI.getOperand(1));

      auto Tag = MI.getOperand(1);
      Tag.setOffset(0x100000000);

            .add(MI.getOperand(0))
            .add(MI.getOperand(2))

    transferImpOps(MI, MIB1, MIB2);
    MI.eraseFromParent();

  case AArch64::ADDlowTLS:
        .add(MI.getOperand(0))
        .add(MI.getOperand(1))
        .add(MI.getOperand(2))
    MI.eraseFromParent();
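// MOVbaseTLS reads the thread pointer with an MRS. TPIDR_EL0 is the default;
// TPIDR_EL1/EL2/EL3 are used instead when the subtarget is configured to keep
// the TLS base at a higher exception level (kernel or hypervisor code).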
  case AArch64::MOVbaseTLS: {
    auto SysReg = AArch64SysReg::TPIDR_EL0;
      SysReg = AArch64SysReg::TPIDR_EL3;
      SysReg = AArch64SysReg::TPIDR_EL2;
      SysReg = AArch64SysReg::TPIDR_EL1;
    MI.eraseFromParent();

  case AArch64::MOVi32imm:
  case AArch64::MOVi64imm:

  case AArch64::RET_ReallyLR: {
    transferImpOps(MI, MIB, MIB);
    MI.eraseFromParent();
  case AArch64::CMP_SWAP_8:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRB, AArch64::STLXRB,
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_16:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRH, AArch64::STLXRH,
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_32:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRW, AArch64::STLXRW,
                          AArch64::WZR, NextMBBI);
  case AArch64::CMP_SWAP_64:
    return expandCMP_SWAP(MBB, MBBI, AArch64::LDAXRX, AArch64::STLXRX,
                          AArch64::SUBSXrs,
                          AArch64::XZR, NextMBBI);
  case AArch64::CMP_SWAP_128:
  case AArch64::CMP_SWAP_128_RELEASE:
  case AArch64::CMP_SWAP_128_ACQUIRE:
  case AArch64::CMP_SWAP_128_MONOTONIC:
    return expandCMP_SWAP_128(MBB, MBBI, NextMBBI);
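// AESMCrrTied / AESIMCrrTied are pseudos whose source and destination are
// tied so the register allocator keeps AESE+AESMC (and AESD+AESIMC) pairs on
// the same register, which is what the hardware fusion expects. By this point
// the constraint has done its job, so they are rewritten to the plain
// AESMCrr / AESIMCrr instructions.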
  case AArch64::AESMCrrTied:
  case AArch64::AESIMCrrTied: {
            TII->get(Opcode == AArch64::AESMCrrTied ? AArch64::AESMCrr
                                                    : AArch64::AESIMCrr))
        .add(MI.getOperand(0))
        .add(MI.getOperand(1));
    transferImpOps(MI, MIB, MIB);
    MI.eraseFromParent();
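// IRGstack and TAGPstack produce MTE-tagged stack addresses. IRGstack first
// resolves the base offset against the frame register (materializing it via
// emitFrameOffset into the destination register when it is non-zero) and then
// emits an IRG to insert a random tag. TAGPstack is lowered to ADDG or SUBG,
// depending on the sign of the offset, which adjusts the address and the tag
// in a single instruction.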
  case AArch64::IRGstack: {
    StackOffset FrameRegOffset = TFI->resolveFrameOffsetReference(
        MF, BaseOffset, false, false, FrameReg,
    if (FrameRegOffset) {
      SrcReg = MI.getOperand(0).getReg();
                      FrameRegOffset, TII);
        .add(MI.getOperand(0))
        .add(MI.getOperand(2));
    MI.eraseFromParent();

  case AArch64::TAGPstack: {
    int64_t Offset = MI.getOperand(2).getImm();
        TII->get(Offset >= 0 ? AArch64::ADDG : AArch64::SUBG))
        .add(MI.getOperand(0))
        .add(MI.getOperand(1))
        .add(MI.getOperand(4));
    MI.eraseFromParent();

  case AArch64::STGloop_wback:
  case AArch64::STZGloop_wback:
    return expandSetTagLoop(MBB, MBBI, NextMBBI);
  case AArch64::STGloop:
  case AArch64::STZGloop:
    report_fatal_error(
        "Non-writeback variants of STGloop / STZGloop should not "
        "survive past PrologEpilogInserter.");
  case AArch64::STR_ZZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 4);
  case AArch64::STR_ZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 3);
  case AArch64::STR_ZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::STR_ZXI, 2);
  case AArch64::LDR_ZZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 4);
  case AArch64::LDR_ZZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 3);
  case AArch64::LDR_ZZXI:
    return expandSVESpillFill(MBB, MBBI, AArch64::LDR_ZXI, 2);
  case AArch64::BLR_RVMARKER:
    return expandCALL_RVMARKER(MBB, MBBI);
  case AArch64::BLR_BTI:
    return expandCALL_BTI(MBB, MBBI);
  case AArch64::StoreSwiftAsyncContext:
    return expandStoreSwiftAsyncContext(MBB, MBBI);
  case AArch64::RestoreZAPseudo: {
    auto *NewMBB = expandRestoreZA(MBB, MBBI);
  case AArch64::MSRpstatePseudo: {
    auto *NewMBB = expandCondSMToggle(MBB, MBBI);
  case AArch64::OBSCURE_COPY: {
    if (MI.getOperand(0).getReg() != MI.getOperand(1).getReg()) {
          .add(MI.getOperand(0))
          .add(MI.getOperand(1))
    MI.eraseFromParent();
  for (auto &MBB : MF)

  return new AArch64ExpandPseudo();