45#define AARCH64_EXPAND_PSEUDO_NAME "AArch64 pseudo instruction expansion pass"
71 unsigned ContiguousOpc,
unsigned StridedOpc);
78 unsigned LdarOp,
unsigned StlrOp,
unsigned CmpOp,
79 unsigned ExtendImm,
unsigned ZeroReg,
103char AArch64ExpandPseudo::ID = 0;
115 assert(MO.isReg() && MO.getReg());
134 if (DstReg == AArch64::XZR || DstReg == AArch64::WZR) {
137 MI.eraseFromParent();
146 for (
auto I =
Insn.begin(), E =
Insn.end();
I != E; ++
I) {
147 bool LastItem = std::next(
I) == E;
152 case AArch64::ORRWri:
153 case AArch64::ORRXri:
156 .
add(
MI.getOperand(0))
157 .
addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
161 bool DstIsDead =
MI.getOperand(0).isDead();
171 case AArch64::ORRWrs:
172 case AArch64::ORRXrs: {
174 bool DstIsDead =
MI.getOperand(0).isDead();
184 case AArch64::ANDXri:
185 case AArch64::EORXri:
188 .
add(
MI.getOperand(0))
189 .
addReg(BitSize == 32 ? AArch64::WZR : AArch64::XZR)
193 bool DstIsDead =
MI.getOperand(0).isDead();
203 case AArch64::MOVNWi:
204 case AArch64::MOVNXi:
205 case AArch64::MOVZWi:
206 case AArch64::MOVZXi: {
207 bool DstIsDead =
MI.getOperand(0).isDead();
215 case AArch64::MOVKWi:
216 case AArch64::MOVKXi: {
218 bool DstIsDead =
MI.getOperand(0).isDead();
231 MI.eraseFromParent();
235bool AArch64ExpandPseudo::expandCMP_SWAP(
237 unsigned StlrOp,
unsigned CmpOp,
unsigned ExtendImm,
unsigned ZeroReg,
242 Register StatusReg =
MI.getOperand(1).getReg();
243 bool StatusDead =
MI.getOperand(1).isDead();
246 assert(!
MI.getOperand(2).isUndef() &&
"cannot handle undef");
248 Register DesiredReg =
MI.getOperand(3).getReg();
257 MF->
insert(++LoadCmpBB->getIterator(), StoreBB);
258 MF->
insert(++StoreBB->getIterator(), DoneBB);
266 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::MOVZWi), StatusReg)
270 BuildMI(LoadCmpBB, MIMD,
TII->get(CmpOp), ZeroReg)
274 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::Bcc))
278 LoadCmpBB->addSuccessor(DoneBB);
279 LoadCmpBB->addSuccessor(StoreBB);
284 BuildMI(StoreBB, MIMD,
TII->get(StlrOp), StatusReg)
287 BuildMI(StoreBB, MIMD,
TII->get(AArch64::CBNZW))
290 StoreBB->addSuccessor(LoadCmpBB);
291 StoreBB->addSuccessor(DoneBB);
293 DoneBB->splice(DoneBB->end(), &
MBB,
MI,
MBB.
end());
294 DoneBB->transferSuccessors(&
MBB);
299 MI.eraseFromParent();
307 StoreBB->clearLiveIns();
309 LoadCmpBB->clearLiveIns();
315bool AArch64ExpandPseudo::expandCMP_SWAP_128(
322 Register StatusReg =
MI.getOperand(2).getReg();
323 bool StatusDead =
MI.getOperand(2).isDead();
326 assert(!
MI.getOperand(3).isUndef() &&
"cannot handle undef");
328 Register DesiredLoReg =
MI.getOperand(4).getReg();
329 Register DesiredHiReg =
MI.getOperand(5).getReg();
330 Register NewLoReg =
MI.getOperand(6).getReg();
331 Register NewHiReg =
MI.getOperand(7).getReg();
333 unsigned LdxpOp, StxpOp;
335 switch (
MI.getOpcode()) {
336 case AArch64::CMP_SWAP_128_MONOTONIC:
337 LdxpOp = AArch64::LDXPX;
338 StxpOp = AArch64::STXPX;
340 case AArch64::CMP_SWAP_128_RELEASE:
341 LdxpOp = AArch64::LDXPX;
342 StxpOp = AArch64::STLXPX;
344 case AArch64::CMP_SWAP_128_ACQUIRE:
345 LdxpOp = AArch64::LDAXPX;
346 StxpOp = AArch64::STXPX;
348 case AArch64::CMP_SWAP_128:
349 LdxpOp = AArch64::LDAXPX;
350 StxpOp = AArch64::STLXPX;
363 MF->
insert(++LoadCmpBB->getIterator(), StoreBB);
364 MF->
insert(++StoreBB->getIterator(), FailBB);
365 MF->
insert(++FailBB->getIterator(), DoneBB);
376 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::SUBSXrs), AArch64::XZR)
380 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::CSINCWr), StatusReg)
384 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::SUBSXrs), AArch64::XZR)
388 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::CSINCWr), StatusReg)
392 BuildMI(LoadCmpBB, MIMD,
TII->get(AArch64::CBNZW))
395 LoadCmpBB->addSuccessor(FailBB);
396 LoadCmpBB->addSuccessor(StoreBB);
401 BuildMI(StoreBB, MIMD,
TII->get(StxpOp), StatusReg)
405 BuildMI(StoreBB, MIMD,
TII->get(AArch64::CBNZW))
409 StoreBB->addSuccessor(LoadCmpBB);
410 StoreBB->addSuccessor(DoneBB);
415 BuildMI(FailBB, MIMD,
TII->get(StxpOp), StatusReg)
419 BuildMI(FailBB, MIMD,
TII->get(AArch64::CBNZW))
422 FailBB->addSuccessor(LoadCmpBB);
423 FailBB->addSuccessor(DoneBB);
425 DoneBB->splice(DoneBB->end(), &
MBB,
MI,
MBB.
end());
426 DoneBB->transferSuccessors(&
MBB);
431 MI.eraseFromParent();
441 FailBB->clearLiveIns();
443 StoreBB->clearLiveIns();
445 LoadCmpBB->clearLiveIns();
489bool AArch64ExpandPseudo::expand_DestructiveOp(
498 bool DstIsDead =
MI.getOperand(0).isDead();
500 unsigned PredIdx, DOPIdx, SrcIdx, Src2Idx;
505 if (DstReg ==
MI.getOperand(3).getReg()) {
507 std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 3, 2);
514 std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(1, 2, 3);
517 std::tie(PredIdx, DOPIdx, SrcIdx) = std::make_tuple(2, 3, 3);
520 std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 2, 3, 4);
521 if (DstReg ==
MI.getOperand(3).getReg()) {
523 std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 3, 4, 2);
525 }
else if (DstReg ==
MI.getOperand(4).getReg()) {
527 std::tie(PredIdx, DOPIdx, SrcIdx, Src2Idx) = std::make_tuple(1, 4, 3, 2);
538 bool DOPRegIsUnique =
false;
541 DOPRegIsUnique = DstReg !=
MI.getOperand(SrcIdx).getReg();
546 DstReg !=
MI.getOperand(DOPIdx).getReg() ||
547 MI.getOperand(DOPIdx).getReg() !=
MI.getOperand(SrcIdx).getReg();
551 DOPRegIsUnique =
true;
555 DstReg !=
MI.getOperand(DOPIdx).getReg() ||
556 (
MI.getOperand(DOPIdx).getReg() !=
MI.getOperand(SrcIdx).getReg() &&
557 MI.getOperand(DOPIdx).getReg() !=
MI.getOperand(Src2Idx).getReg());
573 uint64_t ElementSize =
TII->getElementSizeForOpcode(Opcode);
574 unsigned MovPrfx, LSLZero, MovPrfxZero;
575 switch (ElementSize) {
578 MovPrfx = AArch64::MOVPRFX_ZZ;
579 LSLZero = AArch64::LSL_ZPmI_B;
580 MovPrfxZero = AArch64::MOVPRFX_ZPzZ_B;
583 MovPrfx = AArch64::MOVPRFX_ZZ;
584 LSLZero = AArch64::LSL_ZPmI_H;
585 MovPrfxZero = AArch64::MOVPRFX_ZPzZ_H;
588 MovPrfx = AArch64::MOVPRFX_ZZ;
589 LSLZero = AArch64::LSL_ZPmI_S;
590 MovPrfxZero = AArch64::MOVPRFX_ZPzZ_S;
593 MovPrfx = AArch64::MOVPRFX_ZZ;
594 LSLZero = AArch64::LSL_ZPmI_D;
595 MovPrfxZero = AArch64::MOVPRFX_ZPzZ_D;
611 "The destructive operand should be unique");
613 "This instruction is unpredicated");
618 .
addReg(
MI.getOperand(PredIdx).getReg())
619 .
addReg(
MI.getOperand(DOPIdx).getReg());
633 .
add(
MI.getOperand(PredIdx))
637 }
else if (DstReg !=
MI.getOperand(DOPIdx).getReg()) {
638 assert(DOPRegIsUnique &&
"The destructive operand should be unique");
641 .
addReg(
MI.getOperand(DOPIdx).getReg());
654 .
add(
MI.getOperand(PredIdx))
655 .
add(
MI.getOperand(SrcIdx));
661 DOP.
add(
MI.getOperand(PredIdx))
663 .
add(
MI.getOperand(SrcIdx));
666 DOP.
add(
MI.getOperand(PredIdx))
668 .
add(
MI.getOperand(SrcIdx))
669 .
add(
MI.getOperand(Src2Idx));
675 transferImpOps(
MI, PRFX, DOP);
677 transferImpOps(
MI, DOP, DOP);
679 MI.eraseFromParent();
683bool AArch64ExpandPseudo::expandSetTagLoop(
689 Register AddressReg =
MI.getOperand(1).getReg();
693 bool ZeroData =
MI.getOpcode() == AArch64::STZGloop_wback;
694 const unsigned OpCode1 =
695 ZeroData ? AArch64::STZGPostIndex : AArch64::STGPostIndex;
696 const unsigned OpCode2 =
697 ZeroData ? AArch64::STZ2GPostIndex : AArch64::ST2GPostIndex;
699 unsigned Size =
MI.getOperand(2).getImm();
701 if (
Size % (16 * 2) != 0) {
717 MF->
insert(++LoopBB->getIterator(), DoneBB);
736 LoopBB->addSuccessor(LoopBB);
737 LoopBB->addSuccessor(DoneBB);
739 DoneBB->splice(DoneBB->end(), &
MBB,
MI,
MBB.
end());
740 DoneBB->transferSuccessors(&
MBB);
745 MI.eraseFromParent();
752 LoopBB->clearLiveIns();
754 DoneBB->clearLiveIns();
762 unsigned Opc,
unsigned N) {
763 assert((Opc == AArch64::LDR_ZXI || Opc == AArch64::STR_ZXI ||
764 Opc == AArch64::LDR_PXI || Opc == AArch64::STR_PXI) &&
765 "Unexpected opcode");
766 unsigned RState = (Opc == AArch64::LDR_ZXI || Opc == AArch64::LDR_PXI)
769 unsigned sub0 = (Opc == AArch64::LDR_ZXI || Opc == AArch64::STR_ZXI)
776 int ImmOffset =
MI.getOperand(2).getImm() +
Offset;
777 bool Kill = (
Offset + 1 ==
N) ?
MI.getOperand(1).isKill() :
false;
778 assert(ImmOffset >= -256 && ImmOffset < 256 &&
779 "Immediate spill offset out of range");
786 MI.eraseFromParent();
797 unsigned RegMaskStartIdx) {
806 while (!
MBBI->getOperand(RegMaskStartIdx).isRegMask()) {
808 assert(MOP.
isReg() &&
"can only add register operands");
810 MOP.
getReg(),
false,
true,
false,
816 Call->addOperand(MO);
827 unsigned RegMaskStartIdx) {
828 unsigned Opc = CallTarget.
isGlobal() ? AArch64::BL : AArch64::BLR;
831 "invalid operand for regular call");
835bool AArch64ExpandPseudo::expandCALL_RVMARKER(
844 assert(RVTarget.
isGlobal() &&
"invalid operand for attached call");
848 if (
MI.getOpcode() == AArch64::BLRA_RVMARKER) {
857 "Invalid auth call key");
864 assert(
MI.getOpcode() == AArch64::BLR_RVMARKER &&
"unknown rvmarker MI");
880 if (
MI.shouldUpdateCallSiteInfo())
883 MI.eraseFromParent();
885 std::next(RVCall->getIterator()));
909 if (
MI.shouldUpdateCallSiteInfo())
912 MI.eraseFromParent();
917bool AArch64ExpandPseudo::expandStoreSwiftAsyncContext(
925 if (STI.getTargetTriple().getArchName() !=
"arm64e") {
942 unsigned Opc =
Offset >= 0 ? AArch64::ADDXri : AArch64::SUBXri;
979 MI.getParent()->successors().begin() !=
980 MI.getParent()->successors().end()) &&
981 "Unexpected unreachable in block that restores ZA");
986 .
add(
MI.getOperand(0));
1008 for (
unsigned I = 2;
I <
MI.getNumOperands(); ++
I)
1009 MIB.
add(
MI.getOperand(
I));
1012 MI.eraseFromParent();
1026 MI.getParent()->successors().begin() ==
1027 MI.getParent()->successors().end()) {
1028 MI.eraseFromParent();
1071 switch (
MI.getOperand(2).getImm()) {
1075 Opc = AArch64::TBNZW;
1078 Opc = AArch64::TBZW;
1081 auto PStateSM =
MI.getOperand(3).getReg();
1083 unsigned SMReg32 =
TRI->getSubReg(PStateSM, AArch64::sub_32);
1105 TII->get(AArch64::MSRpstatesvcrImm1));
1109 MIB.
add(
MI.getOperand(0));
1110 MIB.
add(
MI.getOperand(1));
1111 for (
unsigned i = 4; i <
MI.getNumOperands(); ++i)
1112 MIB.
add(
MI.getOperand(i));
1116 MI.eraseFromParent();
1120bool AArch64ExpandPseudo::expandMultiVecPseudo(
1123 unsigned ContiguousOp,
unsigned StridedOpc) {
1138 .
add(
MI.getOperand(0))
1139 .
add(
MI.getOperand(1))
1140 .
add(
MI.getOperand(2))
1141 .
add(
MI.getOperand(3));
1142 transferImpOps(
MI, MIB, MIB);
1143 MI.eraseFromParent();
1153 unsigned Opcode =
MI.getOpcode();
1157 if (OrigInstr != -1) {
1158 auto &Orig =
TII->get(OrigInstr);
1161 return expand_DestructiveOp(
MI,
MBB,
MBBI);
1169 case AArch64::BSPv8i8:
1170 case AArch64::BSPv16i8: {
1172 if (DstReg ==
MI.getOperand(3).getReg()) {
1175 TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BITv8i8
1176 : AArch64::BITv16i8))
1177 .
add(
MI.getOperand(0))
1178 .
add(
MI.getOperand(3))
1179 .
add(
MI.getOperand(2))
1180 .
add(
MI.getOperand(1));
1181 }
else if (DstReg ==
MI.getOperand(2).getReg()) {
1184 TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BIFv8i8
1185 : AArch64::BIFv16i8))
1186 .
add(
MI.getOperand(0))
1187 .
add(
MI.getOperand(2))
1188 .
add(
MI.getOperand(3))
1189 .
add(
MI.getOperand(1));
1192 if (DstReg ==
MI.getOperand(1).getReg()) {
1194 TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
1195 : AArch64::BSLv16i8))
1196 .
add(
MI.getOperand(0))
1197 .
add(
MI.getOperand(1))
1198 .
add(
MI.getOperand(2))
1199 .
add(
MI.getOperand(3));
1202 TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::ORRv8i8
1203 : AArch64::ORRv16i8))
1207 .
add(
MI.getOperand(1))
1208 .
add(
MI.getOperand(1));
1210 TII->get(Opcode == AArch64::BSPv8i8 ? AArch64::BSLv8i8
1211 : AArch64::BSLv16i8))
1212 .
add(
MI.getOperand(0))
1216 .
add(
MI.getOperand(2))
1217 .
add(
MI.getOperand(3));
1220 MI.eraseFromParent();
1224 case AArch64::ADDWrr:
1225 case AArch64::SUBWrr:
1226 case AArch64::ADDXrr:
1227 case AArch64::SUBXrr:
1228 case AArch64::ADDSWrr:
1229 case AArch64::SUBSWrr:
1230 case AArch64::ADDSXrr:
1231 case AArch64::SUBSXrr:
1232 case AArch64::ANDWrr:
1233 case AArch64::ANDXrr:
1234 case AArch64::BICWrr:
1235 case AArch64::BICXrr:
1236 case AArch64::ANDSWrr:
1237 case AArch64::ANDSXrr:
1238 case AArch64::BICSWrr:
1239 case AArch64::BICSXrr:
1240 case AArch64::EONWrr:
1241 case AArch64::EONXrr:
1242 case AArch64::EORWrr:
1243 case AArch64::EORXrr:
1244 case AArch64::ORNWrr:
1245 case AArch64::ORNXrr:
1246 case AArch64::ORRWrr:
1247 case AArch64::ORRXrr: {
1249 switch (
MI.getOpcode()) {
1252 case AArch64::ADDWrr: Opcode = AArch64::ADDWrs;
break;
1253 case AArch64::SUBWrr: Opcode = AArch64::SUBWrs;
break;
1254 case AArch64::ADDXrr: Opcode = AArch64::ADDXrs;
break;
1255 case AArch64::SUBXrr: Opcode = AArch64::SUBXrs;
break;
1256 case AArch64::ADDSWrr: Opcode = AArch64::ADDSWrs;
break;
1257 case AArch64::SUBSWrr: Opcode = AArch64::SUBSWrs;
break;
1258 case AArch64::ADDSXrr: Opcode = AArch64::ADDSXrs;
break;
1259 case AArch64::SUBSXrr: Opcode = AArch64::SUBSXrs;
break;
1260 case AArch64::ANDWrr: Opcode = AArch64::ANDWrs;
break;
1261 case AArch64::ANDXrr: Opcode = AArch64::ANDXrs;
break;
1262 case AArch64::BICWrr: Opcode = AArch64::BICWrs;
break;
1263 case AArch64::BICXrr: Opcode = AArch64::BICXrs;
break;
1264 case AArch64::ANDSWrr: Opcode = AArch64::ANDSWrs;
break;
1265 case AArch64::ANDSXrr: Opcode = AArch64::ANDSXrs;
break;
1266 case AArch64::BICSWrr: Opcode = AArch64::BICSWrs;
break;
1267 case AArch64::BICSXrr: Opcode = AArch64::BICSXrs;
break;
1268 case AArch64::EONWrr: Opcode = AArch64::EONWrs;
break;
1269 case AArch64::EONXrr: Opcode = AArch64::EONXrs;
break;
1270 case AArch64::EORWrr: Opcode = AArch64::EORWrs;
break;
1271 case AArch64::EORXrr: Opcode = AArch64::EORXrs;
break;
1272 case AArch64::ORNWrr: Opcode = AArch64::ORNWrs;
break;
1273 case AArch64::ORNXrr: Opcode = AArch64::ORNXrs;
break;
1274 case AArch64::ORRWrr: Opcode = AArch64::ORRWrs;
break;
1275 case AArch64::ORRXrr: Opcode = AArch64::ORRXrs;
break;
1280 TII->get(Opcode),
MI.getDebugLoc(),
true);
1283 MIB1->setPCSections(MF,
MI.getPCSections());
1285 .add(
MI.getOperand(1))
1286 .add(
MI.getOperand(2))
1288 transferImpOps(
MI, MIB1, MIB1);
1289 if (
auto DebugNumber =
MI.peekDebugInstrNum())
1291 MI.eraseFromParent();
1295 case AArch64::LOADgot: {
1304 TII->get(AArch64::LDRXl), DstReg);
1312 "Only expect globals, externalsymbols, or constant pools");
1325 unsigned Reg32 =
TRI->getSubReg(DstReg, AArch64::sub_32);
1326 unsigned DstFlags =
MI.getOperand(0).getTargetFlags();
1334 .
add(
MI.getOperand(0))
1349 "Only expect globals, externalsymbols, or constant pools");
1357 transferImpOps(
MI, MIB1, MIB2);
1359 MI.eraseFromParent();
1362 case AArch64::MOVaddrBA: {
1369 assert(
MI.getOperand(1).getOffset() == 0 &&
"unexpected offset");
1379 TII->get(AArch64::LDRXui), DstReg)
1383 transferImpOps(
MI, MIB1, MIB2);
1384 MI.eraseFromParent();
1389 case AArch64::MOVaddr:
1390 case AArch64::MOVaddrJT:
1391 case AArch64::MOVaddrCP:
1392 case AArch64::MOVaddrTLS:
1393 case AArch64::MOVaddrEXT: {
1396 assert(DstReg != AArch64::XZR);
1399 .
add(
MI.getOperand(1));
1409 auto Tag =
MI.getOperand(1);
1411 Tag.setOffset(0x100000000);
1420 .
add(
MI.getOperand(0))
1422 .
add(
MI.getOperand(2))
1425 transferImpOps(
MI, MIB1, MIB2);
1426 MI.eraseFromParent();
1429 case AArch64::ADDlowTLS:
1432 .
add(
MI.getOperand(0))
1433 .
add(
MI.getOperand(1))
1434 .
add(
MI.getOperand(2))
1436 MI.eraseFromParent();
1439 case AArch64::MOVbaseTLS: {
1441 auto SysReg = AArch64SysReg::TPIDR_EL0;
1444 SysReg = AArch64SysReg::TPIDR_EL3;
1446 SysReg = AArch64SysReg::TPIDR_EL2;
1448 SysReg = AArch64SysReg::TPIDR_EL1;
1450 SysReg = AArch64SysReg::TPIDRRO_EL0;
1453 MI.eraseFromParent();
1457 case AArch64::MOVi32imm:
1459 case AArch64::MOVi64imm:
1461 case AArch64::RET_ReallyLR: {
1470 transferImpOps(
MI, MIB, MIB);
1471 MI.eraseFromParent();
1474 case AArch64::CMP_SWAP_8:
1475 return expandCMP_SWAP(
MBB,
MBBI, AArch64::LDAXRB, AArch64::STLXRB,
1478 AArch64::WZR, NextMBBI);
1479 case AArch64::CMP_SWAP_16:
1480 return expandCMP_SWAP(
MBB,
MBBI, AArch64::LDAXRH, AArch64::STLXRH,
1483 AArch64::WZR, NextMBBI);
1484 case AArch64::CMP_SWAP_32:
1485 return expandCMP_SWAP(
MBB,
MBBI, AArch64::LDAXRW, AArch64::STLXRW,
1488 AArch64::WZR, NextMBBI);
1489 case AArch64::CMP_SWAP_64:
1490 return expandCMP_SWAP(
MBB,
MBBI,
1491 AArch64::LDAXRX, AArch64::STLXRX, AArch64::SUBSXrs,
1493 AArch64::XZR, NextMBBI);
1494 case AArch64::CMP_SWAP_128:
1495 case AArch64::CMP_SWAP_128_RELEASE:
1496 case AArch64::CMP_SWAP_128_ACQUIRE:
1497 case AArch64::CMP_SWAP_128_MONOTONIC:
1498 return expandCMP_SWAP_128(
MBB,
MBBI, NextMBBI);
1500 case AArch64::AESMCrrTied:
1501 case AArch64::AESIMCrrTied: {
1504 TII->get(Opcode == AArch64::AESMCrrTied ? AArch64::AESMCrr :
1506 .
add(
MI.getOperand(0))
1507 .
add(
MI.getOperand(1));
1508 transferImpOps(
MI, MIB, MIB);
1509 MI.eraseFromParent();
1512 case AArch64::IRGstack: {
1523 StackOffset FrameRegOffset = TFI->resolveFrameOffsetReference(
1524 MF, BaseOffset,
false ,
false , FrameReg,
1528 if (FrameRegOffset) {
1530 SrcReg =
MI.getOperand(0).getReg();
1532 FrameRegOffset,
TII);
1535 .
add(
MI.getOperand(0))
1537 .
add(
MI.getOperand(2));
1538 MI.eraseFromParent();
1541 case AArch64::TAGPstack: {
1542 int64_t
Offset =
MI.getOperand(2).getImm();
1544 TII->get(
Offset >= 0 ? AArch64::ADDG : AArch64::SUBG))
1545 .
add(
MI.getOperand(0))
1546 .
add(
MI.getOperand(1))
1548 .
add(
MI.getOperand(4));
1549 MI.eraseFromParent();
1552 case AArch64::STGloop_wback:
1553 case AArch64::STZGloop_wback:
1554 return expandSetTagLoop(
MBB,
MBBI, NextMBBI);
1555 case AArch64::STGloop:
1556 case AArch64::STZGloop:
1558 "Non-writeback variants of STGloop / STZGloop should not "
1559 "survive past PrologEpilogInserter.");
1560 case AArch64::STR_ZZZZXI:
1561 return expandSVESpillFill(
MBB,
MBBI, AArch64::STR_ZXI, 4);
1562 case AArch64::STR_ZZZXI:
1563 return expandSVESpillFill(
MBB,
MBBI, AArch64::STR_ZXI, 3);
1564 case AArch64::STR_ZZXI:
1565 return expandSVESpillFill(
MBB,
MBBI, AArch64::STR_ZXI, 2);
1566 case AArch64::STR_PPXI:
1567 return expandSVESpillFill(
MBB,
MBBI, AArch64::STR_PXI, 2);
1568 case AArch64::LDR_ZZZZXI:
1569 return expandSVESpillFill(
MBB,
MBBI, AArch64::LDR_ZXI, 4);
1570 case AArch64::LDR_ZZZXI:
1571 return expandSVESpillFill(
MBB,
MBBI, AArch64::LDR_ZXI, 3);
1572 case AArch64::LDR_ZZXI:
1573 return expandSVESpillFill(
MBB,
MBBI, AArch64::LDR_ZXI, 2);
1574 case AArch64::LDR_PPXI:
1575 return expandSVESpillFill(
MBB,
MBBI, AArch64::LDR_PXI, 2);
1576 case AArch64::BLR_RVMARKER:
1577 case AArch64::BLRA_RVMARKER:
1578 return expandCALL_RVMARKER(
MBB,
MBBI);
1579 case AArch64::BLR_BTI:
1580 return expandCALL_BTI(
MBB,
MBBI);
1581 case AArch64::StoreSwiftAsyncContext:
1582 return expandStoreSwiftAsyncContext(
MBB,
MBBI);
1583 case AArch64::RestoreZAPseudo: {
1584 auto *NewMBB = expandRestoreZA(
MBB,
MBBI);
1589 case AArch64::MSRpstatePseudo: {
1590 auto *NewMBB = expandCondSMToggle(
MBB,
MBBI);
1595 case AArch64::COALESCER_BARRIER_FPR16:
1596 case AArch64::COALESCER_BARRIER_FPR32:
1597 case AArch64::COALESCER_BARRIER_FPR64:
1598 case AArch64::COALESCER_BARRIER_FPR128:
1599 MI.eraseFromParent();
1601 case AArch64::LD1B_2Z_IMM_PSEUDO:
1602 return expandMultiVecPseudo(
1603 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1604 AArch64::LD1B_2Z_IMM, AArch64::LD1B_2Z_STRIDED_IMM);
1605 case AArch64::LD1H_2Z_IMM_PSEUDO:
1606 return expandMultiVecPseudo(
1607 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1608 AArch64::LD1H_2Z_IMM, AArch64::LD1H_2Z_STRIDED_IMM);
1609 case AArch64::LD1W_2Z_IMM_PSEUDO:
1610 return expandMultiVecPseudo(
1611 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1612 AArch64::LD1W_2Z_IMM, AArch64::LD1W_2Z_STRIDED_IMM);
1613 case AArch64::LD1D_2Z_IMM_PSEUDO:
1614 return expandMultiVecPseudo(
1615 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1616 AArch64::LD1D_2Z_IMM, AArch64::LD1D_2Z_STRIDED_IMM);
1617 case AArch64::LDNT1B_2Z_IMM_PSEUDO:
1618 return expandMultiVecPseudo(
1619 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1620 AArch64::LDNT1B_2Z_IMM, AArch64::LDNT1B_2Z_STRIDED_IMM);
1621 case AArch64::LDNT1H_2Z_IMM_PSEUDO:
1622 return expandMultiVecPseudo(
1623 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1624 AArch64::LDNT1H_2Z_IMM, AArch64::LDNT1H_2Z_STRIDED_IMM);
1625 case AArch64::LDNT1W_2Z_IMM_PSEUDO:
1626 return expandMultiVecPseudo(
1627 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1628 AArch64::LDNT1W_2Z_IMM, AArch64::LDNT1W_2Z_STRIDED_IMM);
1629 case AArch64::LDNT1D_2Z_IMM_PSEUDO:
1630 return expandMultiVecPseudo(
1631 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1632 AArch64::LDNT1D_2Z_IMM, AArch64::LDNT1D_2Z_STRIDED_IMM);
1633 case AArch64::LD1B_2Z_PSEUDO:
1634 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR2RegClass,
1635 AArch64::ZPR2StridedRegClass, AArch64::LD1B_2Z,
1636 AArch64::LD1B_2Z_STRIDED);
1637 case AArch64::LD1H_2Z_PSEUDO:
1638 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR2RegClass,
1639 AArch64::ZPR2StridedRegClass, AArch64::LD1H_2Z,
1640 AArch64::LD1H_2Z_STRIDED);
1641 case AArch64::LD1W_2Z_PSEUDO:
1642 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR2RegClass,
1643 AArch64::ZPR2StridedRegClass, AArch64::LD1W_2Z,
1644 AArch64::LD1W_2Z_STRIDED);
1645 case AArch64::LD1D_2Z_PSEUDO:
1646 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR2RegClass,
1647 AArch64::ZPR2StridedRegClass, AArch64::LD1D_2Z,
1648 AArch64::LD1D_2Z_STRIDED);
1649 case AArch64::LDNT1B_2Z_PSEUDO:
1650 return expandMultiVecPseudo(
1651 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1652 AArch64::LDNT1B_2Z, AArch64::LDNT1B_2Z_STRIDED);
1653 case AArch64::LDNT1H_2Z_PSEUDO:
1654 return expandMultiVecPseudo(
1655 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1656 AArch64::LDNT1H_2Z, AArch64::LDNT1H_2Z_STRIDED);
1657 case AArch64::LDNT1W_2Z_PSEUDO:
1658 return expandMultiVecPseudo(
1659 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1660 AArch64::LDNT1W_2Z, AArch64::LDNT1W_2Z_STRIDED);
1661 case AArch64::LDNT1D_2Z_PSEUDO:
1662 return expandMultiVecPseudo(
1663 MBB,
MBBI, AArch64::ZPR2RegClass, AArch64::ZPR2StridedRegClass,
1664 AArch64::LDNT1D_2Z, AArch64::LDNT1D_2Z_STRIDED);
1665 case AArch64::LD1B_4Z_IMM_PSEUDO:
1666 return expandMultiVecPseudo(
1667 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1668 AArch64::LD1B_4Z_IMM, AArch64::LD1B_4Z_STRIDED_IMM);
1669 case AArch64::LD1H_4Z_IMM_PSEUDO:
1670 return expandMultiVecPseudo(
1671 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1672 AArch64::LD1H_4Z_IMM, AArch64::LD1H_4Z_STRIDED_IMM);
1673 case AArch64::LD1W_4Z_IMM_PSEUDO:
1674 return expandMultiVecPseudo(
1675 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1676 AArch64::LD1W_4Z_IMM, AArch64::LD1W_4Z_STRIDED_IMM);
1677 case AArch64::LD1D_4Z_IMM_PSEUDO:
1678 return expandMultiVecPseudo(
1679 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1680 AArch64::LD1D_4Z_IMM, AArch64::LD1D_4Z_STRIDED_IMM);
1681 case AArch64::LDNT1B_4Z_IMM_PSEUDO:
1682 return expandMultiVecPseudo(
1683 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1684 AArch64::LDNT1B_4Z_IMM, AArch64::LDNT1B_4Z_STRIDED_IMM);
1685 case AArch64::LDNT1H_4Z_IMM_PSEUDO:
1686 return expandMultiVecPseudo(
1687 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1688 AArch64::LDNT1H_4Z_IMM, AArch64::LDNT1H_4Z_STRIDED_IMM);
1689 case AArch64::LDNT1W_4Z_IMM_PSEUDO:
1690 return expandMultiVecPseudo(
1691 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1692 AArch64::LDNT1W_4Z_IMM, AArch64::LDNT1W_4Z_STRIDED_IMM);
1693 case AArch64::LDNT1D_4Z_IMM_PSEUDO:
1694 return expandMultiVecPseudo(
1695 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1696 AArch64::LDNT1D_4Z_IMM, AArch64::LDNT1D_4Z_STRIDED_IMM);
1697 case AArch64::LD1B_4Z_PSEUDO:
1698 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR4RegClass,
1699 AArch64::ZPR4StridedRegClass, AArch64::LD1B_4Z,
1700 AArch64::LD1B_4Z_STRIDED);
1701 case AArch64::LD1H_4Z_PSEUDO:
1702 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR4RegClass,
1703 AArch64::ZPR4StridedRegClass, AArch64::LD1H_4Z,
1704 AArch64::LD1H_4Z_STRIDED);
1705 case AArch64::LD1W_4Z_PSEUDO:
1706 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR4RegClass,
1707 AArch64::ZPR4StridedRegClass, AArch64::LD1W_4Z,
1708 AArch64::LD1W_4Z_STRIDED);
1709 case AArch64::LD1D_4Z_PSEUDO:
1710 return expandMultiVecPseudo(
MBB,
MBBI, AArch64::ZPR4RegClass,
1711 AArch64::ZPR4StridedRegClass, AArch64::LD1D_4Z,
1712 AArch64::LD1D_4Z_STRIDED);
1713 case AArch64::LDNT1B_4Z_PSEUDO:
1714 return expandMultiVecPseudo(
1715 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1716 AArch64::LDNT1B_4Z, AArch64::LDNT1B_4Z_STRIDED);
1717 case AArch64::LDNT1H_4Z_PSEUDO:
1718 return expandMultiVecPseudo(
1719 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1720 AArch64::LDNT1H_4Z, AArch64::LDNT1H_4Z_STRIDED);
1721 case AArch64::LDNT1W_4Z_PSEUDO:
1722 return expandMultiVecPseudo(
1723 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1724 AArch64::LDNT1W_4Z, AArch64::LDNT1W_4Z_STRIDED);
1725 case AArch64::LDNT1D_4Z_PSEUDO:
1726 return expandMultiVecPseudo(
1727 MBB,
MBBI, AArch64::ZPR4RegClass, AArch64::ZPR4StridedRegClass,
1728 AArch64::LDNT1D_4Z, AArch64::LDNT1D_4Z_STRIDED);
1752 for (
auto &
MBB : MF)
1759 return new AArch64ExpandPseudo();
#define AARCH64_EXPAND_PSEUDO_NAME
MachineInstrBuilder & UseMI
static MachineInstr * createCallWithOps(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, const AArch64InstrInfo *TII, unsigned Opcode, ArrayRef< MachineOperand > ExplicitOps, unsigned RegMaskStartIdx)
static MachineInstr * createCall(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, const AArch64InstrInfo *TII, MachineOperand &CallTarget, unsigned RegMaskStartIdx)
MachineInstrBuilder MachineInstrBuilder & DefMI
SmallVector< AArch64_IMM::ImmInsnModel, 4 > Insn
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
MachineBasicBlock MachineBasicBlock::iterator MBBI
const HexagonInstrInfo * TII
This file implements the LivePhysRegs utility for tracking liveness of physical registers.
This file declares the MachineConstantPool class which is an abstract constant pool to keep track of ...
unsigned const TargetRegisterInfo * TRI
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
AArch64FunctionInfo - This class is derived from MachineFunctionInfo and contains private AArch64-spe...
unsigned getTaggedBasePointerOffset() const
bool isTargetILP32() const
bool isTargetMachO() const
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
The address of a basic block.
FunctionPass class - This class is used to implement most global optimizations.
A set of physical registers with utility functions to track liveness when walking backward/forward th...
Describe properties that are true of each instruction in the target description file.
instr_iterator insert(instr_iterator I, MachineInstr *M)
Insert MI into the instruction list before I, possibly inside a bundle.
const BasicBlock * getBasicBlock() const
Return the LLVM basic block that this instance corresponded to originally.
void addSuccessor(MachineBasicBlock *Succ, BranchProbability Prob=BranchProbability::getUnknown())
Add Succ as a successor of this MachineBasicBlock.
MachineBasicBlock * splitAt(MachineInstr &SplitInst, bool UpdateLiveIns=true, LiveIntervals *LIS=nullptr)
Split a basic block into 2 pieces at SplitPoint.
void eraseFromParent()
This method unlinks 'this' from the containing function and deletes it.
const MachineFunction * getParent() const
Return the MachineFunction containing this basic block.
iterator_range< succ_iterator > successors()
The MachineConstantPool class keeps track of constants referenced by a function which must be spilled...
unsigned getConstantPoolIndex(const Constant *C, Align Alignment)
getConstantPoolIndex - Create a new entry in the constant pool or return an existing one.
MachineFunctionPass - This class adapts the FunctionPass interface to allow convenient creation of pa...
virtual bool runOnMachineFunction(MachineFunction &MF)=0
runOnMachineFunction - This method must be overloaded to perform the desired machine code transformat...
const TargetSubtargetInfo & getSubtarget() const
getSubtarget - Return the subtarget for which this machine code is being compiled.
MachineInstr * CreateMachineInstr(const MCInstrDesc &MCID, DebugLoc DL, bool NoImplicit=false)
CreateMachineInstr - Allocate a new MachineInstr.
const LLVMTargetMachine & getTarget() const
getTarget - Return the target machine this machine code is compiled with
Ty * getInfo()
getInfo - Keep track of various per-function pieces of information for backends that would like to do...
MachineConstantPool * getConstantPool()
getConstantPool - Return the constant pool object for the current function.
void moveCallSiteInfo(const MachineInstr *Old, const MachineInstr *New)
Move the call site info from Old to \New call site info.
MachineBasicBlock * CreateMachineBasicBlock(const BasicBlock *BB=nullptr, std::optional< UniqueBBID > BBID=std::nullopt)
CreateMachineBasicBlock - Allocate a new MachineBasicBlock.
void insert(iterator MBBI, MachineBasicBlock *MBB)
const MachineInstrBuilder & addExternalSymbol(const char *FnName, unsigned TargetFlags=0) const
const MachineInstrBuilder & setMIFlag(MachineInstr::MIFlag Flag) const
const MachineInstrBuilder & addImm(int64_t Val) const
Add a new immediate operand.
const MachineInstrBuilder & add(const MachineOperand &MO) const
const MachineInstrBuilder & addConstantPoolIndex(unsigned Idx, int Offset=0, unsigned TargetFlags=0) const
const MachineInstrBuilder & addGlobalAddress(const GlobalValue *GV, int64_t Offset=0, unsigned TargetFlags=0) const
const MachineInstrBuilder & addReg(Register RegNo, unsigned flags=0, unsigned SubReg=0) const
Add a new virtual register operand.
const MachineInstrBuilder & addMBB(MachineBasicBlock *MBB, unsigned TargetFlags=0) const
const MachineInstrBuilder & cloneMemRefs(const MachineInstr &OtherMI) const
const MachineInstrBuilder & addUse(Register RegNo, unsigned Flags=0, unsigned SubReg=0) const
Add a virtual register use operand.
const MachineInstrBuilder & setMIFlags(unsigned Flags) const
MachineInstr * getInstr() const
If conversion operators fail, use this method to get the MachineInstr explicitly.
const MachineInstrBuilder & addDef(Register RegNo, unsigned Flags=0, unsigned SubReg=0) const
Add a virtual register definition operand.
Representation of each machine instruction.
void setDebugInstrNum(unsigned Num)
Set instruction number of this MachineInstr.
MachineOperand class - Representation of each machine instruction operand.
const GlobalValue * getGlobal() const
bool isReg() const
isReg - Tests if this is a MO_Register operand.
bool isCPI() const
isCPI - Tests if this is a MO_ConstantPoolIndex operand.
bool isSymbol() const
isSymbol - Tests if this is a MO_ExternalSymbol operand.
unsigned getTargetFlags() const
bool isGlobal() const
isGlobal - Tests if this is a MO_GlobalAddress operand.
const char * getSymbolName() const
Register getReg() const
getReg - Returns the register number.
static MachineOperand CreateReg(Register Reg, bool isDef, bool isImp=false, bool isKill=false, bool isDead=false, bool isUndef=false, bool isEarlyClobber=false, unsigned SubReg=0, bool isDebug=false, bool isInternalRead=false, bool isRenamable=false)
int64_t getOffset() const
Return the offset from the symbol in this operand.
static PassRegistry * getPassRegistry()
getPassRegistry - Access the global registry object, which is automatically initialized at applicatio...
virtual StringRef getPassName() const
getPassName - Return a nice clean name for a pass.
Wrapper class representing virtual and physical registers.
MCRegister asMCReg() const
Utility to check-convert this value to a MCRegister.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
StackOffset holds a fixed and a scalable offset in bytes.
StringRef - Represent a constant reference to a string, i.e.
CodeModel::Model getCodeModel() const
Returns the code model.
ArrayRef< MCPhysReg > getRegisters() const
TargetRegisterInfo base class - We assume that the target defines a static array of TargetRegisterDesc objects that represent all of the machine registers that the target has.
virtual const TargetRegisterInfo * getRegisterInfo() const
getRegisterInfo - If register information is available, return it.
virtual const TargetInstrInfo * getInstrInfo() const
self_iterator getIterator()
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
@ MO_NC
MO_NC - Indicates whether the linker is expected to check the symbol reference for overflow.
@ MO_PAGEOFF
MO_PAGEOFF - A symbol operand with this flag represents the offset of that symbol within a 4K page.
@ MO_PREL
MO_PREL - Indicates that the bits of the symbol operand represented by MO_G0 etc are PC relative.
@ MO_PAGE
MO_PAGE - A symbol operand with this flag represents the pc-relative offset of the 4K page containing the symbol.
@ MO_TAGGED
MO_TAGGED - With MO_PAGE, indicates that the page includes a memory tag in bits 56-63.
@ MO_G3
MO_G3 - A symbol operand with this flag (granule 3) represents the high 16-bits of a 64-bit address, used in a MOVZ or MOVK instruction.
static unsigned getArithExtendImm(AArch64_AM::ShiftExtendType ET, unsigned Imm)
getArithExtendImm - Encode the extend type and shift amount for an arithmetic instruction: imm: 3-bit extend amount.
static unsigned getShifterImm(AArch64_AM::ShiftExtendType ST, unsigned Imm)
getShifterImm - Encode the shift type and amount: imm: 6-bit shift amount shifter: 000 ==> lsl, 001 ==> lsr, 010 ==> asr, 011 ==> ror.
void expandMOVImm(uint64_t Imm, unsigned BitSize, SmallVectorImpl< ImmInsnModel > &Insn)
Expand a MOVi32imm or MOVi64imm pseudo instruction to one or more real move-immediate instructions to synthesize the immediate.
int getSVERevInstr(uint16_t Opcode)
@ DestructiveInstTypeMask
@ DestructiveUnaryPassthru
@ DestructiveTernaryCommWithRev
@ DestructiveBinaryCommWithRev
int getSVEPseudoMap(uint16_t Opcode)
int getSVENonRevInstr(uint16_t Opcode)
unsigned ID
LLVM IR allows the use of arbitrary numbers as calling convention identifiers.
@ Implicit
Not emitted register (e.g. carry, or temporary result).
@ Renamable
Register that may be renamed.
@ Define
Register definition.
@ Kill
The last use of a register.
@ Undef
Value of the register doesn't matter.
This is an optimization pass for GlobalISel generic memory operations.
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
void finalizeBundle(MachineBasicBlock &MBB, MachineBasicBlock::instr_iterator FirstMI, MachineBasicBlock::instr_iterator LastMI)
finalizeBundle - Finalize a machine instruction bundle which includes a sequence of instructions starting from FirstMI to LastMI (exclusive).
MachineInstrBuilder BuildMI(MachineFunction &MF, const MIMetadata &MIMD, const MCInstrDesc &MCID)
Builder interface. Specify how to create the initial instruction itself.
APFloat abs(APFloat X)
Returns the absolute value of the argument.
unsigned getDeadRegState(bool B)
void initializeAArch64ExpandPseudoPass(PassRegistry &)
FunctionPass * createAArch64ExpandPseudoPass()
Returns an instance of the pseudo instruction expansion pass.
void emitFrameOffset(MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI, const DebugLoc &DL, unsigned DestReg, unsigned SrcReg, StackOffset Offset, const TargetInstrInfo *TII, MachineInstr::MIFlag=MachineInstr::NoFlags, bool SetNZCV=false, bool NeedsWinCFI=false, bool *HasWinCFI=nullptr, bool EmitCFAOffset=false, StackOffset InitialOffset={}, unsigned FrameReg=AArch64::SP)
emitFrameOffset - Emit instructions as needed to set DestReg to SrcReg plus Offset.
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
unsigned getKillRegState(bool B)
unsigned getRenamableRegState(bool B)
bool is_contained(R &&Range, const E &Element)
Returns true if Element is found in Range.
void computeAndAddLiveIns(LivePhysRegs &LiveRegs, MachineBasicBlock &MBB)
Convenience function combining computeLiveIns() and addLiveIns().
This struct is a compact representation of a valid (non-zero power of two) alignment.
Description of the encoding of one expression Op.