MachineCopyPropagation.cpp
1//===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass ---------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This is an extremely simple MachineInstr-level copy propagation pass.
10//
11// This pass forwards the source of COPYs to the users of their destinations
12// when doing so is legal. For example:
13//
14// %reg1 = COPY %reg0
15// ...
16// ... = OP %reg1
17//
18// If
19// - %reg0 has not been clobbered by the time of the use of %reg1
20// - the register class constraints are satisfied
21// - the COPY def is the only value that reaches OP
22// then this pass replaces the above with:
23//
24// %reg1 = COPY %reg0
25// ...
26// ... = OP %reg0
27//
28// This pass also removes some redundant COPYs. For example:
29//
30// %R1 = COPY %R0
31// ... // No clobber of %R1
32// %R0 = COPY %R1 <<< Removed
33//
34// or
35//
36// %R1 = COPY %R0
37// ... // No clobber of %R0
38// %R1 = COPY %R0 <<< Removed
39//
40// or
41//
42// $R0 = OP ...
43// ... // No read/clobber of $R0 and $R1
44// $R1 = COPY $R0 // $R0 is killed
45// Replace $R0 with $R1 and remove the COPY
46// $R1 = OP ...
47// ...
48//
49//===----------------------------------------------------------------------===//
50
51#include "llvm/ADT/DenseMap.h"
52#include "llvm/ADT/STLExtras.h"
53#include "llvm/ADT/SetVector.h"
54#include "llvm/ADT/SmallSet.h"
55#include "llvm/ADT/SmallVector.h"
56#include "llvm/ADT/Statistic.h"
57#include "llvm/ADT/iterator_range.h"
58#include "llvm/CodeGen/MachineBasicBlock.h"
59#include "llvm/CodeGen/MachineFunction.h"
60#include "llvm/CodeGen/MachineFunctionPass.h"
61#include "llvm/CodeGen/MachineInstr.h"
62#include "llvm/CodeGen/MachineOperand.h"
63#include "llvm/CodeGen/MachineRegisterInfo.h"
64#include "llvm/CodeGen/TargetInstrInfo.h"
65#include "llvm/CodeGen/TargetRegisterInfo.h"
66#include "llvm/CodeGen/TargetSubtargetInfo.h"
67#include "llvm/InitializePasses.h"
68#include "llvm/MC/MCRegister.h"
69#include "llvm/MC/MCRegisterInfo.h"
70#include "llvm/Pass.h"
71#include "llvm/Support/Debug.h"
72#include "llvm/Support/DebugCounter.h"
73#include "llvm/Support/raw_ostream.h"
74#include <cassert>
75#include <iterator>
76
77using namespace llvm;
78
79#define DEBUG_TYPE "machine-cp"
80
81STATISTIC(NumDeletes, "Number of dead copies deleted");
82STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
83STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
84STATISTIC(SpillageChainsLength, "Length of spillage chains");
85STATISTIC(NumSpillageChains, "Number of spillage chains");
86DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
87 "Controls which register COPYs are forwarded");
88
89 static cl::opt<bool> MCPUseCopyInstr("mcp-use-is-copy-instr", cl::init(false),
90                                      cl::Hidden);
91 static cl::opt<cl::boolOrDefault>
92     EnableSpillageCopyElimination("enable-spill-copy-elim", cl::Hidden);
93
94namespace {
95
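// Return the (Destination, Source) operand pair if MI is a copy. When
// UseCopyInstr is set, target-specific copy-like instructions recognized by
// TII.isCopyInstr() are accepted as well; otherwise only plain COPYs are.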
96static std::optional<DestSourcePair> isCopyInstr(const MachineInstr &MI,
97 const TargetInstrInfo &TII,
98 bool UseCopyInstr) {
99 if (UseCopyInstr)
100 return TII.isCopyInstr(MI);
101
102 if (MI.isCopy())
103 return std::optional<DestSourcePair>(
104 DestSourcePair{MI.getOperand(0), MI.getOperand(1)});
105
106 return std::nullopt;
107}
108
109class CopyTracker {
110 struct CopyInfo {
111 MachineInstr *MI = nullptr;
112 MachineInstr *LastSeenUseInCopy = nullptr;
113 SmallPtrSet<MachineInstr *, 4> SrcUsers;
114 SmallVector<MCRegister, 4> DefRegs;
115 bool Avail = false;
116 };
117
118 DenseMap<MCRegUnit, CopyInfo> Copies;
119
120public:
121 /// Mark all of the given registers and their subregisters as unavailable for
122 /// copying.
123 void markRegsUnavailable(ArrayRef<MCRegister> Regs,
124 const TargetRegisterInfo &TRI) {
125 for (MCRegister Reg : Regs) {
126 // Source of copy is no longer available for propagation.
127 for (MCRegUnit Unit : TRI.regunits(Reg)) {
128 auto CI = Copies.find(Unit);
129 if (CI != Copies.end())
130 CI->second.Avail = false;
131 }
132 }
133 }
134
135 /// Remove register from copy maps.
136 void invalidateRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
137 const TargetInstrInfo &TII, bool UseCopyInstr) {
138 // Since Reg might be a subreg of some registers, only invalidating Reg is
139 // not enough. We have to find the COPY that defines Reg or the registers
140 // defined by Reg, and invalidate all of them. Similarly, we must invalidate
141 // all of the subregisters used in the source of the COPY.
142 SmallSet<MCRegUnit, 8> RegUnitsToInvalidate;
143 auto InvalidateCopy = [&](MachineInstr *MI) {
144 std::optional<DestSourcePair> CopyOperands =
145 isCopyInstr(*MI, TII, UseCopyInstr);
146 assert(CopyOperands && "Expect copy");
147
148 auto Dest = TRI.regunits(CopyOperands->Destination->getReg().asMCReg());
149 auto Src = TRI.regunits(CopyOperands->Source->getReg().asMCReg());
150 RegUnitsToInvalidate.insert(Dest.begin(), Dest.end());
151 RegUnitsToInvalidate.insert(Src.begin(), Src.end());
152 };
153
154 for (MCRegUnit Unit : TRI.regunits(Reg)) {
155 auto I = Copies.find(Unit);
156 if (I != Copies.end()) {
157 if (MachineInstr *MI = I->second.MI)
158 InvalidateCopy(MI);
159 if (MachineInstr *MI = I->second.LastSeenUseInCopy)
160 InvalidateCopy(MI);
161 }
162 }
163 for (MCRegUnit Unit : RegUnitsToInvalidate)
164 Copies.erase(Unit);
165 }
166
167 /// Clobber a single register, removing it from the tracker's copy maps.
168 void clobberRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
169 const TargetInstrInfo &TII, bool UseCopyInstr) {
170 for (MCRegUnit Unit : TRI.regunits(Reg)) {
171 auto I = Copies.find(Unit);
172 if (I != Copies.end()) {
173 // When we clobber the source of a copy, we need to clobber everything
174 // it defined.
175 markRegsUnavailable(I->second.DefRegs, TRI);
176 // When we clobber the destination of a copy, we need to clobber the
177 // whole register it defined.
178 if (MachineInstr *MI = I->second.MI) {
179 std::optional<DestSourcePair> CopyOperands =
180 isCopyInstr(*MI, TII, UseCopyInstr);
181
182 MCRegister Def = CopyOperands->Destination->getReg().asMCReg();
183 MCRegister Src = CopyOperands->Source->getReg().asMCReg();
184
185 markRegsUnavailable(Def, TRI);
186
187 // Since we clobber the destination of a copy, keeping Def in Src's
188 // "DefRegs" no longer makes sense. We also need to remove the record
189 // from the copy maps that indicates Src defined Def. Failing to do so
190 // might cause the pass to miss some opportunities to further eliminate
191 // redundant copy instructions.
192 // Consider the following sequence during the
193 // ForwardCopyPropagateBlock procedure:
194 // L1: r0 = COPY r9 <- TrackMI
195 // L2: r0 = COPY r8 <- TrackMI (Remove r9 defined r0 from tracker)
196 // L3: use r0 <- Remove L2 from MaybeDeadCopies
197 // L4: early-clobber r9 <- Clobber r9 (L2 is still valid in tracker)
198 // L5: r0 = COPY r8 <- Remove NopCopy
199 for (MCRegUnit SrcUnit : TRI.regunits(Src)) {
200 auto SrcCopy = Copies.find(SrcUnit);
201 if (SrcCopy != Copies.end() && SrcCopy->second.LastSeenUseInCopy) {
202 // If SrcCopy defines multiple values, we only need
203 // to erase the record for Def in DefRegs.
204 for (auto itr = SrcCopy->second.DefRegs.begin();
205 itr != SrcCopy->second.DefRegs.end(); itr++) {
206 if (*itr == Def) {
207 SrcCopy->second.DefRegs.erase(itr);
208 // If DefRegs becomes empty after the removal, we can remove
209 // SrcCopy from the tracker's copy maps. We only remove entries
210 // that solely record that Def is defined by Src; if an entry
211 // also contains definition records for other registers, it
212 // cannot be cleared.
213 if (SrcCopy->second.DefRegs.empty() && !SrcCopy->second.MI) {
214 Copies.erase(SrcCopy);
215 }
216 break;
217 }
218 }
219 }
220 }
221 }
222 // Now we can erase the copy.
223 Copies.erase(I);
224 }
225 }
226 }
227
228 /// Track the users of the copy's source register, and return false if that
229 /// can't be done. We can only track if we have a COPY instruction whose
230 /// source is the same as Reg.
231 bool trackSrcUsers(MCRegister Reg, MachineInstr &MI,
232 const TargetRegisterInfo &TRI, const TargetInstrInfo &TII,
233 bool UseCopyInstr) {
234 MCRegUnit RU = *TRI.regunits(Reg).begin();
235 MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);
236 if (!AvailCopy)
237 return false;
238
239 std::optional<DestSourcePair> CopyOperands =
240 isCopyInstr(*AvailCopy, TII, UseCopyInstr);
241 Register Src = CopyOperands->Source->getReg();
242
243 // Bail out if the source of the copy is not the same as Reg.
244 if (Src != Reg)
245 return false;
246
247 auto I = Copies.find(RU);
248 if (I == Copies.end())
249 return false;
250
251 I->second.SrcUsers.insert(&MI);
252 return true;
253 }
254
255 /// Return the users for a given register.
256 SmallPtrSet<MachineInstr *, 4> getSrcUsers(MCRegister Reg,
257 const TargetRegisterInfo &TRI) {
258 MCRegUnit RU = *TRI.regunits(Reg).begin();
259 auto I = Copies.find(RU);
260 if (I == Copies.end())
261 return {};
262 return I->second.SrcUsers;
263 }
264
265 /// Add this copy's registers into the tracker's copy maps.
266 void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI,
267 const TargetInstrInfo &TII, bool UseCopyInstr) {
268 std::optional<DestSourcePair> CopyOperands =
269 isCopyInstr(*MI, TII, UseCopyInstr);
270 assert(CopyOperands && "Tracking non-copy?");
271
272 MCRegister Src = CopyOperands->Source->getReg().asMCReg();
273 MCRegister Def = CopyOperands->Destination->getReg().asMCReg();
274
275 // Remember Def is defined by the copy.
276 for (MCRegUnit Unit : TRI.regunits(Def))
277 Copies[Unit] = {MI, nullptr, {}, {}, true};
278
279 // Remember source that's copied to Def. Once it's clobbered, then
280 // it's no longer available for copy propagation.
281 for (MCRegUnit Unit : TRI.regunits(Src)) {
282 auto &Copy = Copies[Unit];
283 if (!is_contained(Copy.DefRegs, Def))
284 Copy.DefRegs.push_back(Def);
285 Copy.LastSeenUseInCopy = MI;
286 }
287 }
288
289 bool hasAnyCopies() {
290 return !Copies.empty();
291 }
292
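// Return the tracked copy whose destination covers RegUnit, if any. With
// MustBeAvailable set, also require that the copy's source has not been
// clobbered since the copy was tracked.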
293 MachineInstr *findCopyForUnit(MCRegUnit RegUnit,
294 const TargetRegisterInfo &TRI,
295 bool MustBeAvailable = false) {
296 auto CI = Copies.find(RegUnit);
297 if (CI == Copies.end())
298 return nullptr;
299 if (MustBeAvailable && !CI->second.Avail)
300 return nullptr;
301 return CI->second.MI;
302 }
303
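// Return the still-available copy that reads RegUnit, provided that unit was
// copied into exactly one tracked destination register.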
304 MachineInstr *findCopyDefViaUnit(MCRegUnit RegUnit,
305 const TargetRegisterInfo &TRI) {
306 auto CI = Copies.find(RegUnit);
307 if (CI == Copies.end())
308 return nullptr;
309 if (CI->second.DefRegs.size() != 1)
310 return nullptr;
311 MCRegUnit RU = *TRI.regunits(CI->second.DefRegs[0]).begin();
312 return findCopyForUnit(RU, TRI, true);
313 }
314
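// For backward propagation: return the copy whose source covers Reg, as long
// as no regmask between that copy and I clobbers the copy's source or
// destination.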
315 MachineInstr *findAvailBackwardCopy(MachineInstr &I, MCRegister Reg,
316 const TargetRegisterInfo &TRI,
317 const TargetInstrInfo &TII,
318 bool UseCopyInstr) {
319 MCRegUnit RU = *TRI.regunits(Reg).begin();
320 MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);
321
322 if (!AvailCopy)
323 return nullptr;
324
325 std::optional<DestSourcePair> CopyOperands =
326 isCopyInstr(*AvailCopy, TII, UseCopyInstr);
327 Register AvailSrc = CopyOperands->Source->getReg();
328 Register AvailDef = CopyOperands->Destination->getReg();
329 if (!TRI.isSubRegisterEq(AvailSrc, Reg))
330 return nullptr;
331
332 for (const MachineInstr &MI :
333 make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
334 for (const MachineOperand &MO : MI.operands())
335 if (MO.isRegMask())
336 // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDef?
337 if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
338 return nullptr;
339
340 return AvailCopy;
341 }
342
343 MachineInstr *findAvailCopy(MachineInstr &DestCopy, MCRegister Reg,
344 const TargetRegisterInfo &TRI,
345 const TargetInstrInfo &TII, bool UseCopyInstr) {
346 // We check the first RegUnit here, since we'll only be interested in the
347 // copy if it copies the entire register anyway.
348 MCRegUnit RU = *TRI.regunits(Reg).begin();
349 MachineInstr *AvailCopy =
350 findCopyForUnit(RU, TRI, /*MustBeAvailable=*/true);
351
352 if (!AvailCopy)
353 return nullptr;
354
355 std::optional<DestSourcePair> CopyOperands =
356 isCopyInstr(*AvailCopy, TII, UseCopyInstr);
357 Register AvailSrc = CopyOperands->Source->getReg();
358 Register AvailDef = CopyOperands->Destination->getReg();
359 if (!TRI.isSubRegisterEq(AvailDef, Reg))
360 return nullptr;
361
362 // Check that the available copy isn't clobbered by any regmasks between
363 // itself and the destination.
364 for (const MachineInstr &MI :
365 make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
366 for (const MachineOperand &MO : MI.operands())
367 if (MO.isRegMask())
368 if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
369 return nullptr;
370
371 return AvailCopy;
372 }
373
374 // Find last COPY that defines Reg before Current MachineInstr.
375 MachineInstr *findLastSeenDefInCopy(const MachineInstr &Current,
376 MCRegister Reg,
377 const TargetRegisterInfo &TRI,
378 const TargetInstrInfo &TII,
379 bool UseCopyInstr) {
380 MCRegUnit RU = *TRI.regunits(Reg).begin();
381 auto CI = Copies.find(RU);
382 if (CI == Copies.end() || !CI->second.Avail)
383 return nullptr;
384
385 MachineInstr *DefCopy = CI->second.MI;
386 std::optional<DestSourcePair> CopyOperands =
387 isCopyInstr(*DefCopy, TII, UseCopyInstr);
388 Register Def = CopyOperands->Destination->getReg();
389 if (!TRI.isSubRegisterEq(Def, Reg))
390 return nullptr;
391
392 for (const MachineInstr &MI :
393 make_range(static_cast<const MachineInstr *>(DefCopy)->getIterator(),
394 Current.getIterator()))
395 for (const MachineOperand &MO : MI.operands())
396 if (MO.isRegMask())
397 if (MO.clobbersPhysReg(Def)) {
398 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of "
399 << printReg(Def, &TRI) << "\n");
400 return nullptr;
401 }
402
403 return DefCopy;
404 }
405
406 // Find last COPY that uses Reg.
407 MachineInstr *findLastSeenUseInCopy(MCRegister Reg,
408 const TargetRegisterInfo &TRI) {
409 MCRegUnit RU = *TRI.regunits(Reg).begin();
410 auto CI = Copies.find(RU);
411 if (CI == Copies.end())
412 return nullptr;
413 return CI->second.LastSeenUseInCopy;
414 }
415
416 void clear() {
417 Copies.clear();
418 }
419};
420
421class MachineCopyPropagation : public MachineFunctionPass {
422 const TargetRegisterInfo *TRI = nullptr;
423 const TargetInstrInfo *TII = nullptr;
424 const MachineRegisterInfo *MRI = nullptr;
425
426 // If true, use TII.isCopyInstr() to recognize target-specific copy-like
427 // instructions in addition to plain COPYs.
427 bool UseCopyInstr;
428
429public:
430 static char ID; // Pass identification, replacement for typeid
431
432 MachineCopyPropagation(bool CopyInstr = false)
433 : MachineFunctionPass(ID), UseCopyInstr(CopyInstr || MCPUseCopyInstr) {
434 initializeMachineCopyPropagationPass(*PassRegistry::getPassRegistry());
435 }
436
437 void getAnalysisUsage(AnalysisUsage &AU) const override {
438 AU.setPreservesCFG();
439 MachineFunctionPass::getAnalysisUsage(AU);
440 }
441
442 bool runOnMachineFunction(MachineFunction &MF) override;
443 bool runOnMachineFunction(MachineFunction &MF) override;
444 MachineFunctionProperties getRequiredProperties() const override {
445 return MachineFunctionProperties().set(
446 MachineFunctionProperties::Property::NoVRegs);
447 }
448
449private:
450 typedef enum { DebugUse = false, RegularUse = true } DebugType;
451
452 void ReadRegister(MCRegister Reg, MachineInstr &Reader, DebugType DT);
453 void readSuccessorLiveIns(const MachineBasicBlock &MBB);
454 void ForwardCopyPropagateBlock(MachineBasicBlock &MBB);
455 void BackwardCopyPropagateBlock(MachineBasicBlock &MBB);
456 void EliminateSpillageCopies(MachineBasicBlock &MBB);
457 bool eraseIfRedundant(MachineInstr &Copy, MCRegister Src, MCRegister Def);
458 void forwardUses(MachineInstr &MI);
459 void propagateDefs(MachineInstr &MI);
460 bool isForwardableRegClassCopy(const MachineInstr &Copy,
461 const MachineInstr &UseI, unsigned UseIdx);
462 bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
463 const MachineInstr &UseI,
464 unsigned UseIdx);
465 bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);
466 bool hasOverlappingMultipleDef(const MachineInstr &MI,
467 const MachineOperand &MODef, Register Def);
468 bool canUpdateSrcUsers(const MachineInstr &Copy,
469 const MachineOperand &CopySrc);
470
471 /// Candidates for deletion.
472 SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;
473
474 /// Multimap tracking debug users in current BB
475 DenseMap<MachineInstr *, SmallSet<MachineInstr *, 2>> CopyDbgUsers;
476
477 CopyTracker Tracker;
478
479 bool Changed = false;
480};
481
482} // end anonymous namespace
483
484char MachineCopyPropagation::ID = 0;
485
486char &llvm::MachineCopyPropagationID = MachineCopyPropagation::ID;
487
488INITIALIZE_PASS(MachineCopyPropagation, DEBUG_TYPE,
489 "Machine Copy Propagation Pass", false, false)
490
491void MachineCopyPropagation::ReadRegister(MCRegister Reg, MachineInstr &Reader,
492 DebugType DT) {
493 // If 'Reg' is defined by a copy, the copy is no longer a candidate
494 // for elimination. If a copy is "read" by a debug user, record the user
495 // for propagation.
496 for (MCRegUnit Unit : TRI->regunits(Reg)) {
497 if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI)) {
498 if (DT == RegularUse) {
499 LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
500 MaybeDeadCopies.remove(Copy);
501 } else {
502 CopyDbgUsers[Copy].insert(&Reader);
503 }
504 }
505 }
506}
507
508void MachineCopyPropagation::readSuccessorLiveIns(
509 const MachineBasicBlock &MBB) {
510 if (MaybeDeadCopies.empty())
511 return;
512
513 // If a copy result is livein to a successor, it is not dead.
514 for (const MachineBasicBlock *Succ : MBB.successors()) {
515 for (const auto &LI : Succ->liveins()) {
516 for (MCRegUnit Unit : TRI->regunits(LI.PhysReg)) {
517 if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI))
518 MaybeDeadCopies.remove(Copy);
519 }
520 }
521 }
522}
523
524/// Return true if \p PreviousCopy did copy register \p Src to register \p Def.
525/// This fact may have been obscured by sub register usage or may not be true at
526/// all even though Src and Def are subregisters of the registers used in
527/// PreviousCopy. e.g.
528/// isNopCopy("ecx = COPY eax", AX, CX) == true
529/// isNopCopy("ecx = COPY eax", AH, CL) == false
530static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src,
531 MCRegister Def, const TargetRegisterInfo *TRI,
532 const TargetInstrInfo *TII, bool UseCopyInstr) {
533
534 std::optional<DestSourcePair> CopyOperands =
535 isCopyInstr(PreviousCopy, *TII, UseCopyInstr);
536 MCRegister PreviousSrc = CopyOperands->Source->getReg().asMCReg();
537 MCRegister PreviousDef = CopyOperands->Destination->getReg().asMCReg();
538 if (Src == PreviousSrc && Def == PreviousDef)
539 return true;
540 if (!TRI->isSubRegister(PreviousSrc, Src))
541 return false;
542 unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
543 return SubIdx == TRI->getSubRegIndex(PreviousDef, Def);
544}
545
546/// Remove instruction \p Copy if there exists a previous copy that copies the
547/// register \p Src to the register \p Def; This may happen indirectly by
548/// copying the super registers.
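/// For instance, after "$ecx = COPY $eax" (with neither register touched in
/// between), a later "$cx = COPY $ax" copies the same value and can be erased.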
549bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy,
550 MCRegister Src, MCRegister Def) {
551 // Avoid eliminating a copy from/to a reserved register, as we cannot predict
552 // the value (Example: The sparc zero register is writable but stays zero).
553 if (MRI->isReserved(Src) || MRI->isReserved(Def))
554 return false;
555
556 // Search for an existing copy.
557 MachineInstr *PrevCopy =
558 Tracker.findAvailCopy(Copy, Def, *TRI, *TII, UseCopyInstr);
559 if (!PrevCopy)
560 return false;
561
562 auto PrevCopyOperands = isCopyInstr(*PrevCopy, *TII, UseCopyInstr);
563 // Check that the existing copy uses the correct sub registers.
564 if (PrevCopyOperands->Destination->isDead())
565 return false;
566 if (!isNopCopy(*PrevCopy, Src, Def, TRI, TII, UseCopyInstr))
567 return false;
568
569 LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());
570
571 // Copy was redundantly redefining either Src or Def. Remove earlier kill
572 // flags between Copy and PrevCopy because the value will be reused now.
573 std::optional<DestSourcePair> CopyOperands =
574 isCopyInstr(Copy, *TII, UseCopyInstr);
575 assert(CopyOperands);
576
577 Register CopyDef = CopyOperands->Destination->getReg();
578 assert(CopyDef == Src || CopyDef == Def);
579 for (MachineInstr &MI :
580 make_range(PrevCopy->getIterator(), Copy.getIterator()))
581 MI.clearRegisterKills(CopyDef, TRI);
582
583 // Clear undef flag from remaining copy if needed.
584 if (!CopyOperands->Source->isUndef()) {
585 PrevCopy->getOperand(PrevCopyOperands->Source->getOperandNo())
586 .setIsUndef(false);
587 }
588
589 Copy.eraseFromParent();
590 Changed = true;
591 ++NumDeletes;
592 return true;
593}
594
595bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
596 const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
597 std::optional<DestSourcePair> CopyOperands =
598 isCopyInstr(Copy, *TII, UseCopyInstr);
599 Register Def = CopyOperands->Destination->getReg();
600
601 if (const TargetRegisterClass *URC =
602 UseI.getRegClassConstraint(UseIdx, TII, TRI))
603 return URC->contains(Def);
604
605 // We don't process further if UseI is a COPY, since forward copy propagation
606 // should handle that.
607 return false;
608}
609
610/// Decide whether we should forward the source of \param Copy to its use in
611/// \param UseI based on the physical register class constraints of the opcode
612/// and avoiding introducing more cross-class COPYs.
613bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
614 const MachineInstr &UseI,
615 unsigned UseIdx) {
616 std::optional<DestSourcePair> CopyOperands =
617 isCopyInstr(Copy, *TII, UseCopyInstr);
618 Register CopySrcReg = CopyOperands->Source->getReg();
619
620 // If the new register meets the opcode register constraints, then allow
621 // forwarding.
622 if (const TargetRegisterClass *URC =
623 UseI.getRegClassConstraint(UseIdx, TII, TRI))
624 return URC->contains(CopySrcReg);
625
626 auto UseICopyOperands = isCopyInstr(UseI, *TII, UseCopyInstr);
627 if (!UseICopyOperands)
628 return false;
629
630 /// COPYs don't have register class constraints, so if the user instruction
631 /// is a COPY, we just try to avoid introducing additional cross-class
632 /// COPYs. For example:
633 ///
634 /// RegClassA = COPY RegClassB // Copy parameter
635 /// ...
636 /// RegClassB = COPY RegClassA // UseI parameter
637 ///
638 /// which after forwarding becomes
639 ///
640 /// RegClassA = COPY RegClassB
641 /// ...
642 /// RegClassB = COPY RegClassB
643 ///
644 /// so we have reduced the number of cross-class COPYs and potentially
645 /// introduced a nop COPY that can be removed.
646
647 // Allow forwarding if src and dst belong to any common class, so long as they
648 // don't belong to any (possibly smaller) common class that requires copies to
649 // go via a different class.
650 Register UseDstReg = UseICopyOperands->Destination->getReg();
651 bool Found = false;
652 bool IsCrossClass = false;
653 for (const TargetRegisterClass *RC : TRI->regclasses()) {
654 if (RC->contains(CopySrcReg) && RC->contains(UseDstReg)) {
655 Found = true;
656 if (TRI->getCrossCopyRegClass(RC) != RC) {
657 IsCrossClass = true;
658 break;
659 }
660 }
661 }
662 if (!Found)
663 return false;
664 if (!IsCrossClass)
665 return true;
666 // The forwarded copy would be cross-class. Only do this if the original copy
667 // was also cross-class.
668 Register CopyDstReg = CopyOperands->Destination->getReg();
669 for (const TargetRegisterClass *RC : TRI->regclasses()) {
670 if (RC->contains(CopySrcReg) && RC->contains(CopyDstReg) &&
671 TRI->getCrossCopyRegClass(RC) != RC)
672 return true;
673 }
674 return false;
675}
676
677/// Check that \p MI does not have implicit uses that overlap with its \p Use
678/// operand (the register being replaced), since these can sometimes be
679/// implicitly tied to other operands. For example, on AMDGPU:
680///
681/// V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
682///
683/// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
684/// way of knowing we need to update the latter when updating the former.
685bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
686 const MachineOperand &Use) {
687 for (const MachineOperand &MIUse : MI.uses())
688 if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
689 MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
690 return true;
691
692 return false;
693}
694
695/// For an MI that has multiple definitions, check whether \p MI has
696/// a definition that overlaps with another of its definitions.
697/// For example, on ARM: umull r9, r9, lr, r0
698/// The umull instruction is unpredictable unless RdHi and RdLo are different.
699bool MachineCopyPropagation::hasOverlappingMultipleDef(
700 const MachineInstr &MI, const MachineOperand &MODef, Register Def) {
701 for (const MachineOperand &MIDef : MI.all_defs()) {
702 if ((&MIDef != &MODef) && MIDef.isReg() &&
703 TRI->regsOverlap(Def, MIDef.getReg()))
704 return true;
705 }
706
707 return false;
708}
709
710/// Return true if it is safe to update all users of the \p CopySrc register
711/// in the given \p Copy instruction.
712bool MachineCopyPropagation::canUpdateSrcUsers(const MachineInstr &Copy,
713 const MachineOperand &CopySrc) {
714 assert(CopySrc.isReg() && "Expected a register operand");
715 for (auto *SrcUser : Tracker.getSrcUsers(CopySrc.getReg(), *TRI)) {
716 if (hasImplicitOverlap(*SrcUser, CopySrc))
717 return false;
718
719 for (MachineOperand &MO : SrcUser->uses()) {
720 if (!MO.isReg() || !MO.isUse() || MO.getReg() != CopySrc.getReg())
721 continue;
722 if (MO.isTied() || !MO.isRenamable() ||
723 !isBackwardPropagatableRegClassCopy(Copy, *SrcUser,
724 MO.getOperandNo()))
725 return false;
726 }
727 }
728 return true;
729}
730
731/// Look for available copies whose destination register is used by \p MI and
732/// replace the use in \p MI with the copy's source register.
733void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
734 if (!Tracker.hasAnyCopies())
735 return;
736
737 // Look for non-tied explicit uses of a register for which there is an
738 // active COPY instruction defining that register, and replace the use
739 // with the source of the active COPY.
740 for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
741 ++OpIdx) {
742 MachineOperand &MOUse = MI.getOperand(OpIdx);
743 // Don't forward into undef use operands, since doing so can cause problems
744 // with the machine verifier: it doesn't treat undef reads as reads, so we
745 // could end up with a live range that ends on an undef read, producing an
746 // error that the live range doesn't end on a read of the live-range
747 // register.
748 if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
749 MOUse.isImplicit())
750 continue;
751
752 if (!MOUse.getReg())
753 continue;
754
755 // Check that the register is marked 'renamable' so we know it is safe to
756 // rename it without violating any constraints that aren't expressed in the
757 // IR (e.g. ABI or opcode requirements).
758 if (!MOUse.isRenamable())
759 continue;
760
761 MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg().asMCReg(),
762 *TRI, *TII, UseCopyInstr);
763 if (!Copy)
764 continue;
765
766 std::optional<DestSourcePair> CopyOperands =
767 isCopyInstr(*Copy, *TII, UseCopyInstr);
768 Register CopyDstReg = CopyOperands->Destination->getReg();
769 const MachineOperand &CopySrc = *CopyOperands->Source;
770 Register CopySrcReg = CopySrc.getReg();
771
772 Register ForwardedReg = CopySrcReg;
773 // MI might use a sub-register of the Copy destination, in which case the
774 // forwarded register is the matching sub-register of the Copy source.
775 if (MOUse.getReg() != CopyDstReg) {
776 unsigned SubRegIdx = TRI->getSubRegIndex(CopyDstReg, MOUse.getReg());
777 assert(SubRegIdx &&
778 "MI source is not a sub-register of Copy destination");
779 ForwardedReg = TRI->getSubReg(CopySrcReg, SubRegIdx);
780 if (!ForwardedReg) {
781 LLVM_DEBUG(dbgs() << "MCP: Copy source does not have sub-register "
782 << TRI->getSubRegIndexName(SubRegIdx) << '\n');
783 continue;
784 }
785 }
786
787 // Don't forward COPYs of reserved regs unless they are constant.
788 if (MRI->isReserved(CopySrcReg) && !MRI->isConstantPhysReg(CopySrcReg))
789 continue;
790
791 if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
792 continue;
793
794 if (hasImplicitOverlap(MI, MOUse))
795 continue;
796
797 // Check that the instruction is not a copy that partially overwrites the
798 // original copy source that we are about to use. The tracker mechanism
799 // cannot cope with that.
800 if (isCopyInstr(MI, *TII, UseCopyInstr) &&
801 MI.modifiesRegister(CopySrcReg, TRI) &&
802 !MI.definesRegister(CopySrcReg, /*TRI=*/nullptr)) {
803 LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
804 continue;
805 }
806
807 if (!DebugCounter::shouldExecute(FwdCounter)) {
808 LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n "
809 << MI);
810 continue;
811 }
812
813 LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
814 << "\n with " << printReg(ForwardedReg, TRI)
815 << "\n in " << MI << " from " << *Copy);
816
817 MOUse.setReg(ForwardedReg);
818
819 if (!CopySrc.isRenamable())
820 MOUse.setIsRenamable(false);
821 MOUse.setIsUndef(CopySrc.isUndef());
822
823 LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
824
825 // Clear kill markers that may have been invalidated.
826 for (MachineInstr &KMI :
827 make_range(Copy->getIterator(), std::next(MI.getIterator())))
828 KMI.clearRegisterKills(CopySrcReg, TRI);
829
830 ++NumCopyForwards;
831 Changed = true;
832 }
833}
834
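/// Walk \p MBB forward: track COPYs, erase copies that are provably redundant,
/// forward copy sources into later uses, and finally delete tracked copies
/// whose results are never read.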
835void MachineCopyPropagation::ForwardCopyPropagateBlock(MachineBasicBlock &MBB) {
836 LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
837 << "\n");
838
839 for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
840 // Analyze copies (which don't overlap themselves).
841 std::optional<DestSourcePair> CopyOperands =
842 isCopyInstr(MI, *TII, UseCopyInstr);
843 if (CopyOperands) {
844
845 Register RegSrc = CopyOperands->Source->getReg();
846 Register RegDef = CopyOperands->Destination->getReg();
847
848 if (!TRI->regsOverlap(RegDef, RegSrc)) {
849 assert(RegDef.isPhysical() && RegSrc.isPhysical() &&
850 "MachineCopyPropagation should be run after register allocation!");
851
852 MCRegister Def = RegDef.asMCReg();
853 MCRegister Src = RegSrc.asMCReg();
854
855 // If the two copies cancel out and the source of the first copy
856 // hasn't been overwritten, eliminate the second one. e.g.
857 // %ecx = COPY %eax
858 // ... nothing clobbered eax.
859 // %eax = COPY %ecx
860 // =>
861 // %ecx = COPY %eax
862 //
863 // or
864 //
865 // %ecx = COPY %eax
866 // ... nothing clobbered eax.
867 // %ecx = COPY %eax
868 // =>
869 // %ecx = COPY %eax
870 if (eraseIfRedundant(MI, Def, Src) || eraseIfRedundant(MI, Src, Def))
871 continue;
872
873 forwardUses(MI);
874
875 // Src may have been changed by forwardUses()
876 CopyOperands = isCopyInstr(MI, *TII, UseCopyInstr);
877 Src = CopyOperands->Source->getReg().asMCReg();
878
879 // If Src is defined by a previous copy, the previous copy cannot be
880 // eliminated.
881 ReadRegister(Src, MI, RegularUse);
882 for (const MachineOperand &MO : MI.implicit_operands()) {
883 if (!MO.isReg() || !MO.readsReg())
884 continue;
885 MCRegister Reg = MO.getReg().asMCReg();
886 if (!Reg)
887 continue;
888 ReadRegister(Reg, MI, RegularUse);
889 }
890
891 LLVM_DEBUG(dbgs() << "MCP: Copy is a deletion candidate: "; MI.dump());
892
893 // Copy is now a candidate for deletion.
894 if (!MRI->isReserved(Def))
895 MaybeDeadCopies.insert(&MI);
896
897 // If 'Def' was previously the source of another copy, then that earlier
898 // copy's source is no longer available. e.g.
899 // %xmm9 = copy %xmm2
900 // ...
901 // %xmm2 = copy %xmm0
902 // ...
903 // %xmm2 = copy %xmm9
904 Tracker.clobberRegister(Def, *TRI, *TII, UseCopyInstr);
905 for (const MachineOperand &MO : MI.implicit_operands()) {
906 if (!MO.isReg() || !MO.isDef())
907 continue;
908 MCRegister Reg = MO.getReg().asMCReg();
909 if (!Reg)
910 continue;
911 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
912 }
913
914 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
915
916 continue;
917 }
918 }
919
920 // Clobber any earlyclobber regs first.
921 for (const MachineOperand &MO : MI.operands())
922 if (MO.isReg() && MO.isEarlyClobber()) {
923 MCRegister Reg = MO.getReg().asMCReg();
924 // If we have a tied earlyclobber, that means it is also read by this
925 // instruction, so we need to make sure we don't remove it as dead
926 // later.
927 if (MO.isTied())
928 ReadRegister(Reg, MI, RegularUse);
929 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
930 }
931
932 forwardUses(MI);
933
934 // Not a copy.
935 SmallVector<MCRegister, 4> Defs;
936 const MachineOperand *RegMask = nullptr;
937 for (const MachineOperand &MO : MI.operands()) {
938 if (MO.isRegMask())
939 RegMask = &MO;
940 if (!MO.isReg())
941 continue;
942 Register Reg = MO.getReg();
943 if (!Reg)
944 continue;
945
946 assert(!Reg.isVirtual() &&
947 "MachineCopyPropagation should be run after register allocation!");
948
949 if (MO.isDef() && !MO.isEarlyClobber()) {
950 // Skip invalidating constant registers.
951 if (!MRI->isConstantPhysReg(Reg)) {
952 Defs.push_back(Reg.asMCReg());
953 continue;
954 }
955 } else if (MO.readsReg())
956 ReadRegister(Reg.asMCReg(), MI, MO.isDebug() ? DebugUse : RegularUse);
957 }
958
959 // The instruction has a register mask operand which means that it clobbers
960 // a large set of registers. Treat clobbered registers the same way as
961 // defined registers.
962 if (RegMask) {
963 // Erase any MaybeDeadCopies whose destination register is clobbered.
964 for (SmallSetVector<MachineInstr *, 8>::iterator DI =
965 MaybeDeadCopies.begin();
966 DI != MaybeDeadCopies.end();) {
967 MachineInstr *MaybeDead = *DI;
968 std::optional<DestSourcePair> CopyOperands =
969 isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
970 MCRegister Reg = CopyOperands->Destination->getReg().asMCReg();
971 assert(!MRI->isReserved(Reg));
972
973 if (!RegMask->clobbersPhysReg(Reg)) {
974 ++DI;
975 continue;
976 }
977
978 LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: ";
979 MaybeDead->dump());
980
981 // Make sure we invalidate any entries in the copy maps before erasing
982 // the instruction.
983 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
984
985 // erase() will return the next valid iterator pointing to the next
986 // element after the erased one.
987 DI = MaybeDeadCopies.erase(DI);
988 MaybeDead->eraseFromParent();
989 Changed = true;
990 ++NumDeletes;
991 }
992 }
993
994 // Any previous copy that defined or read one of the Defs is no longer
995 // available.
995 for (MCRegister Reg : Defs)
996 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
997 }
998
999 bool TracksLiveness = MRI->tracksLiveness();
1000
1001 // If liveness is tracked, we can use the live-in lists to know which
1002 // copies aren't dead.
1003 if (TracksLiveness)
1004 readSuccessorLiveIns(MBB);
1005
1006 // If MBB doesn't have any successors, delete copies whose defs are not used.
1007 // If MBB does have successors, we can only delete copies if we are able to
1008 // use liveness information from successors to confirm they are really dead.
1009 if (MBB.succ_empty() || TracksLiveness) {
1010 for (MachineInstr *MaybeDead : MaybeDeadCopies) {
1011 LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
1012 MaybeDead->dump());
1013
1014 std::optional<DestSourcePair> CopyOperands =
1015 isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
1016 assert(CopyOperands);
1017
1018 Register SrcReg = CopyOperands->Source->getReg();
1019 Register DestReg = CopyOperands->Destination->getReg();
1020 assert(!MRI->isReserved(DestReg));
1021
1022 // Update matching debug values, if any.
1023 SmallVector<MachineInstr *> MaybeDeadDbgUsers(
1024 CopyDbgUsers[MaybeDead].begin(), CopyDbgUsers[MaybeDead].end());
1025 MRI->updateDbgUsersToReg(DestReg.asMCReg(), SrcReg.asMCReg(),
1026 MaybeDeadDbgUsers);
1027
1028 MaybeDead->eraseFromParent();
1029 Changed = true;
1030 ++NumDeletes;
1031 }
1032 }
1033
1034 MaybeDeadCopies.clear();
1035 CopyDbgUsers.clear();
1036 Tracker.clear();
1037}
1038
1039static bool isBackwardPropagatableCopy(const DestSourcePair &CopyOperands,
1040 const MachineRegisterInfo &MRI) {
1041 Register Def = CopyOperands.Destination->getReg();
1042 Register Src = CopyOperands.Source->getReg();
1043
1044 if (!Def || !Src)
1045 return false;
1046
1047 if (MRI.isReserved(Def) || MRI.isReserved(Src))
1048 return false;
1049
1050 return CopyOperands.Source->isRenamable() && CopyOperands.Source->isKill();
1051}
1052
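/// Rewrite register defs in \p MI with the destination of a later COPY that
/// reads them (backward propagation), updating any intermediate users of the
/// old register as well, so that the COPY becomes dead.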
1053void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
1054 if (!Tracker.hasAnyCopies())
1055 return;
1056
1057 for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
1058 ++OpIdx) {
1059 MachineOperand &MODef = MI.getOperand(OpIdx);
1060
1061 if (!MODef.isReg() || MODef.isUse())
1062 continue;
1063
1064 // Ignore non-trivial cases.
1065 if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
1066 continue;
1067
1068 if (!MODef.getReg())
1069 continue;
1070
1071 // We only handle defs whose register is marked renamable, i.e. it came
1072 // from a vreg and carries no constraints beyond its register class.
1072 if (!MODef.isRenamable())
1073 continue;
1074
1075 MachineInstr *Copy = Tracker.findAvailBackwardCopy(
1076 MI, MODef.getReg().asMCReg(), *TRI, *TII, UseCopyInstr);
1077 if (!Copy)
1078 continue;
1079
1080 std::optional<DestSourcePair> CopyOperands =
1081 isCopyInstr(*Copy, *TII, UseCopyInstr);
1082 Register Def = CopyOperands->Destination->getReg();
1083 Register Src = CopyOperands->Source->getReg();
1084
1085 if (MODef.getReg() != Src)
1086 continue;
1087
1088 if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
1089 continue;
1090
1091 if (hasImplicitOverlap(MI, MODef))
1092 continue;
1093
1094 if (hasOverlappingMultipleDef(MI, MODef, Def))
1095 continue;
1096
1097 if (!canUpdateSrcUsers(*Copy, *CopyOperands->Source))
1098 continue;
1099
1100 LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
1101 << "\n with " << printReg(Def, TRI) << "\n in "
1102 << MI << " from " << *Copy);
1103
1104 MODef.setReg(Def);
1105 MODef.setIsRenamable(CopyOperands->Destination->isRenamable());
1106
1107 for (auto *SrcUser : Tracker.getSrcUsers(Src, *TRI)) {
1108 for (MachineOperand &MO : SrcUser->uses()) {
1109 if (!MO.isReg() || !MO.isUse() || MO.getReg() != Src)
1110 continue;
1111 MO.setReg(Def);
1112 MO.setIsRenamable(CopyOperands->Destination->isRenamable());
1113 }
1114 }
1115
1116 LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
1117 MaybeDeadCopies.insert(Copy);
1118 Changed = true;
1119 ++NumCopyBackwardPropagated;
1120 }
1121}
1122
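/// Walk \p MBB bottom-up, tracking backward-propagatable COPYs and rewriting
/// the instructions that define their sources; COPYs made dead this way are
/// erased at the end of the block.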
1123void MachineCopyPropagation::BackwardCopyPropagateBlock(
1124 MachineBasicBlock &MBB) {
1125 LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
1126 << "\n");
1127
1128 for (MachineInstr &MI : llvm::make_early_inc_range(llvm::reverse(MBB))) {
1129 // Ignore non-trivial COPYs.
1130 std::optional<DestSourcePair> CopyOperands =
1131 isCopyInstr(MI, *TII, UseCopyInstr);
1132 if (CopyOperands) {
1133 Register DefReg = CopyOperands->Destination->getReg();
1134 Register SrcReg = CopyOperands->Source->getReg();
1135
1136 if (!TRI->regsOverlap(DefReg, SrcReg)) {
1137 // Unlike forward cp, we don't invoke propagateDefs here,
1138 // just let forward cp do COPY-to-COPY propagation.
1139 if (isBackwardPropagatableCopy(*CopyOperands, *MRI)) {
1140 Tracker.invalidateRegister(SrcReg.asMCReg(), *TRI, *TII,
1141 UseCopyInstr);
1142 Tracker.invalidateRegister(DefReg.asMCReg(), *TRI, *TII,
1143 UseCopyInstr);
1144 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1145 continue;
1146 }
1147 }
1148 }
1149
1150 // Invalidate any earlyclobber regs first.
1151 for (const MachineOperand &MO : MI.operands())
1152 if (MO.isReg() && MO.isEarlyClobber()) {
1153 MCRegister Reg = MO.getReg().asMCReg();
1154 if (!Reg)
1155 continue;
1156 Tracker.invalidateRegister(Reg, *TRI, *TII, UseCopyInstr);
1157 }
1158
1159 propagateDefs(MI);
1160 for (const MachineOperand &MO : MI.operands()) {
1161 if (!MO.isReg())
1162 continue;
1163
1164 if (!MO.getReg())
1165 continue;
1166
1167 if (MO.isDef())
1168 Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
1169 UseCopyInstr);
1170
1171 if (MO.readsReg()) {
1172 if (MO.isDebug()) {
1173 // Check if the register in the debug instruction is utilized
1174 // in a copy instruction, so we can update the debug info if the
1175 // register is changed.
1176 for (MCRegUnit Unit : TRI->regunits(MO.getReg().asMCReg())) {
1177 if (auto *Copy = Tracker.findCopyDefViaUnit(Unit, *TRI)) {
1178 CopyDbgUsers[Copy].insert(&MI);
1179 }
1180 }
1181 } else if (!Tracker.trackSrcUsers(MO.getReg().asMCReg(), MI, *TRI, *TII,
1182 UseCopyInstr)) {
1183 // If we can't track the source users, invalidate the register.
1184 Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
1185 UseCopyInstr);
1186 }
1187 }
1188 }
1189 }
1190
1191 for (auto *Copy : MaybeDeadCopies) {
1192 std::optional<DestSourcePair> CopyOperands =
1193 isCopyInstr(*Copy, *TII, UseCopyInstr);
1194 Register Src = CopyOperands->Source->getReg();
1195 Register Def = CopyOperands->Destination->getReg();
1196 SmallVector<MachineInstr *> MaybeDeadDbgUsers(CopyDbgUsers[Copy].begin(),
1197 CopyDbgUsers[Copy].end());
1198
1199 MRI->updateDbgUsersToReg(Src.asMCReg(), Def.asMCReg(), MaybeDeadDbgUsers);
1200 Copy->eraseFromParent();
1201 ++NumDeletes;
1202 }
1203
1204 MaybeDeadCopies.clear();
1205 CopyDbgUsers.clear();
1206 Tracker.clear();
1207}
1208
1209static void LLVM_ATTRIBUTE_UNUSED printSpillReloadChain(
1210 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &SpillChain,
1211 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &ReloadChain,
1212 MachineInstr *Leader) {
1213 auto &SC = SpillChain[Leader];
1214 auto &RC = ReloadChain[Leader];
1215 for (auto I = SC.rbegin(), E = SC.rend(); I != E; ++I)
1216 (*I)->dump();
1217 for (MachineInstr *MI : RC)
1218 MI->dump();
1219}
1220
1221// Remove spill-reload like copy chains. For example
1222// r0 = COPY r1
1223// r1 = COPY r2
1224// r2 = COPY r3
1225// r3 = COPY r4
1226// <def-use r4>
1227// r4 = COPY r3
1228// r3 = COPY r2
1229// r2 = COPY r1
1230// r1 = COPY r0
1231// will be folded into
1232// r0 = COPY r1
1233// r1 = COPY r4
1234// <def-use r4>
1235// r4 = COPY r1
1236// r1 = COPY r0
1237// TODO: Currently we don't track usage of r0 outside the chain, so we
1238// conservatively keep its value as it was before the rewrite.
1239//
1240// The algorithm is trying to keep
1241// property#1: No Def of spill COPY in the chain is used or defined until the
1242// paired reload COPY in the chain uses the Def.
1243//
1244// property#2: No Source of a COPY in the chain is used or defined until the
1245// next COPY in the chain defines the Source, except for the innermost
1246// spill-reload pair.
1247//
1248// The algorithm checks every COPY inside the MBB, assumes it is a reload COPY,
1249// and then searches backward for the paired spill COPY, i.e. the COPY that
1250// defines the Src of the reload COPY. If such a pair is found, it either
1251// belongs to an existing chain or starts a new chain, depending on whether the
1252// last available COPY uses the Def of the reload COPY.
1253// Implementation notes: we use CopyTracker::findLastSeenDefInCopy(Reg, ...) to
1254// find the last COPY that defines Reg, and CopyTracker::findLastSeenUseInCopy
1255// to find the last COPY that uses Reg. When we encounter a non-COPY
1256// instruction, we check the registers in its operands. If such a Reg is
1257// defined by a COPY, we untrack this Reg via
1258// CopyTracker::clobberRegister(Reg, ...).
1259void MachineCopyPropagation::EliminateSpillageCopies(MachineBasicBlock &MBB) {
1260 // ChainLeader maps each MI inside a spill-reload chain to its innermost
1261 // reload COPY. Thus we can track whether a MI belongs to an existing chain.
1262 DenseMap<MachineInstr *, MachineInstr *> ChainLeader;
1263 // SpillChain maps the innermost reload COPY of a spill-reload chain to the
1264 // sequence of COPYs that forms the spills of that chain.
1265 // ReloadChain maps the innermost reload COPY of a spill-reload chain to the
1266 // sequence of COPYs that forms the reloads of that chain.
1267 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> SpillChain, ReloadChain;
1268 // If a COPY's Source has a use or def before the next COPY defines the
1269 // Source, we put the COPY in this set to keep property#2.
1270 DenseSet<const MachineInstr *> CopySourceInvalid;
1271
1272 auto TryFoldSpillageCopies =
1273 [&, this](const SmallVectorImpl<MachineInstr *> &SC,
1274 const SmallVectorImpl<MachineInstr *> &RC) {
1275 assert(SC.size() == RC.size() && "Spill-reload should be paired");
1276
1277 // We need at least 3 pairs of copies for the transformation to apply,
1278 // because the outermost pair cannot be removed (we don't recolor outside
1279 // of the chain) and we need at least one temporary spill slot to shorten
1280 // the chain. If we only have a chain of two pairs, we already have the
1281 // shortest sequence this code can handle: the outermost pair for the
1282 // temporary spill slot, and the pair that uses that temporary spill slot
1283 // for the other end of the chain.
1284 // TODO: We might be able to simplify to one spill-reload pair by collecting
1285 // more information about the outermost COPY.
1286 if (SC.size() <= 2)
1287 return;
1288
1289 // If property#2 is violated, we don't fold the chain.
1290 for (const MachineInstr *Spill : drop_begin(SC))
1291 if (CopySourceInvalid.count(Spill))
1292 return;
1293
1294 for (const MachineInstr *Reload : drop_end(RC))
1295 if (CopySourceInvalid.count(Reload))
1296 return;
1297
1298 auto CheckCopyConstraint = [this](Register Def, Register Src) {
1299 for (const TargetRegisterClass *RC : TRI->regclasses()) {
1300 if (RC->contains(Def) && RC->contains(Src))
1301 return true;
1302 }
1303 return false;
1304 };
1305
1306 auto UpdateReg = [](MachineInstr *MI, const MachineOperand *Old,
1307 const MachineOperand *New) {
1308 for (MachineOperand &MO : MI->operands()) {
1309 if (&MO == Old)
1310 MO.setReg(New->getReg());
1311 }
1312 };
1313
1314 std::optional<DestSourcePair> InnerMostSpillCopy =
1315 isCopyInstr(*SC[0], *TII, UseCopyInstr);
1316 std::optional<DestSourcePair> OuterMostSpillCopy =
1317 isCopyInstr(*SC.back(), *TII, UseCopyInstr);
1318 std::optional<DestSourcePair> InnerMostReloadCopy =
1319 isCopyInstr(*RC[0], *TII, UseCopyInstr);
1320 std::optional<DestSourcePair> OuterMostReloadCopy =
1321 isCopyInstr(*RC.back(), *TII, UseCopyInstr);
1322 if (!CheckCopyConstraint(OuterMostSpillCopy->Source->getReg(),
1323 InnerMostSpillCopy->Source->getReg()) ||
1324 !CheckCopyConstraint(InnerMostReloadCopy->Destination->getReg(),
1325 OuterMostReloadCopy->Destination->getReg()))
1326 return;
1327
1328 SpillageChainsLength += SC.size() + RC.size();
1329 NumSpillageChains += 1;
1330 UpdateReg(SC[0], InnerMostSpillCopy->Destination,
1331 OuterMostSpillCopy->Source);
1332 UpdateReg(RC[0], InnerMostReloadCopy->Source,
1333 OuterMostReloadCopy->Destination);
1334
1335 for (size_t I = 1; I < SC.size() - 1; ++I) {
1336 SC[I]->eraseFromParent();
1337 RC[I]->eraseFromParent();
1338 NumDeletes += 2;
1339 }
1340 };
1341
1342 auto IsFoldableCopy = [this](const MachineInstr &MaybeCopy) {
1343 if (MaybeCopy.getNumImplicitOperands() > 0)
1344 return false;
1345 std::optional<DestSourcePair> CopyOperands =
1346 isCopyInstr(MaybeCopy, *TII, UseCopyInstr);
1347 if (!CopyOperands)
1348 return false;
1349 Register Src = CopyOperands->Source->getReg();
1350 Register Def = CopyOperands->Destination->getReg();
1351 return Src && Def && !TRI->regsOverlap(Src, Def) &&
1352 CopyOperands->Source->isRenamable() &&
1353 CopyOperands->Destination->isRenamable();
1354 };
1355
1356 auto IsSpillReloadPair = [&, this](const MachineInstr &Spill,
1357 const MachineInstr &Reload) {
1358 if (!IsFoldableCopy(Spill) || !IsFoldableCopy(Reload))
1359 return false;
1360 std::optional<DestSourcePair> SpillCopy =
1361 isCopyInstr(Spill, *TII, UseCopyInstr);
1362 std::optional<DestSourcePair> ReloadCopy =
1363 isCopyInstr(Reload, *TII, UseCopyInstr);
1364 if (!SpillCopy || !ReloadCopy)
1365 return false;
1366 return SpillCopy->Source->getReg() == ReloadCopy->Destination->getReg() &&
1367 SpillCopy->Destination->getReg() == ReloadCopy->Source->getReg();
1368 };
1369
1370 auto IsChainedCopy = [&, this](const MachineInstr &Prev,
1371 const MachineInstr &Current) {
1372 if (!IsFoldableCopy(Prev) || !IsFoldableCopy(Current))
1373 return false;
1374 std::optional<DestSourcePair> PrevCopy =
1375 isCopyInstr(Prev, *TII, UseCopyInstr);
1376 std::optional<DestSourcePair> CurrentCopy =
1377 isCopyInstr(Current, *TII, UseCopyInstr);
1378 if (!PrevCopy || !CurrentCopy)
1379 return false;
1380 return PrevCopy->Source->getReg() == CurrentCopy->Destination->getReg();
1381 };
1382
1383 for (MachineInstr &MI : MBB) {
1384 std::optional<DestSourcePair> CopyOperands =
1385 isCopyInstr(MI, *TII, UseCopyInstr);
1386
1387 // Update track information via non-copy instruction.
1388 SmallSet<Register, 8> RegsToClobber;
1389 if (!CopyOperands) {
1390 for (const MachineOperand &MO : MI.operands()) {
1391 if (!MO.isReg())
1392 continue;
1393 Register Reg = MO.getReg();
1394 if (!Reg)
1395 continue;
1396 MachineInstr *LastUseCopy =
1397 Tracker.findLastSeenUseInCopy(Reg.asMCReg(), *TRI);
1398 if (LastUseCopy) {
1399 LLVM_DEBUG(dbgs() << "MCP: Copy source of\n");
1400 LLVM_DEBUG(LastUseCopy->dump());
1401 LLVM_DEBUG(dbgs() << "might be invalidated by\n");
1402 LLVM_DEBUG(MI.dump());
1403 CopySourceInvalid.insert(LastUseCopy);
1404 }
1405 // Note that Tracker.clobberRegister(Reg, ...) removes all tracking of
1406 // Reg, i.e. the COPY that defines Reg is removed from the mapping and
1407 // COPYs that use Reg are marked unavailable.
1408 // We don't invoke CopyTracker::clobberRegister(Reg, ...) if Reg is not
1409 // defined by a previous COPY, since we don't want to make COPYs that
1410 // use Reg unavailable.
1411 if (Tracker.findLastSeenDefInCopy(MI, Reg.asMCReg(), *TRI, *TII,
1412 UseCopyInstr))
1413 // Thus we can keep the property#1.
1414 RegsToClobber.insert(Reg);
1415 }
1416 for (Register Reg : RegsToClobber) {
1417 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
1418 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Reg, TRI)
1419 << "\n");
1420 }
1421 continue;
1422 }
1423
1424 Register Src = CopyOperands->Source->getReg();
1425 Register Def = CopyOperands->Destination->getReg();
1426 // Check if we can find a paired spill-reload copy.
1427 LLVM_DEBUG(dbgs() << "MCP: Searching paired spill for reload: ");
1428 LLVM_DEBUG(MI.dump());
1429 MachineInstr *MaybeSpill =
1430 Tracker.findLastSeenDefInCopy(MI, Src.asMCReg(), *TRI, *TII, UseCopyInstr);
1431 bool MaybeSpillIsChained = ChainLeader.count(MaybeSpill);
1432 if (!MaybeSpillIsChained && MaybeSpill &&
1433 IsSpillReloadPair(*MaybeSpill, MI)) {
1434 // Check if we already have an existing chain. Now we have a
1435 // spill-reload pair.
1436 // L2: r2 = COPY r3
1437 // L5: r3 = COPY r2
1438 // Looking for a valid COPY before L5 which uses r3.
1439 // There are several cases.
1440 // Case #1:
1441 // No COPY is found, which can happen when r3 is defined or used
1442 // between (L2, L5); we create a new chain for L2 and L5.
1443 // Case #2:
1444 // L2: r2 = COPY r3
1445 // L5: r3 = COPY r2
1446 // Such a COPY is found and it is L2; we create a new chain for L2 and L5.
1447 // Case #3:
1448 // L2: r2 = COPY r3
1449 // L3: r1 = COPY r3
1450 // L5: r3 = COPY r2
1451 // we create a new chain for L2 and L5.
1452 // Case #4:
1453 // L2: r2 = COPY r3
1454 // L3: r1 = COPY r3
1455 // L4: r3 = COPY r1
1456 // L5: r3 = COPY r2
1457 // Such a COPY won't be found since L4 defines r3. We create a new
1458 // chain for L2 and L5.
1459 // Case #5:
1460 // L2: r2 = COPY r3
1461 // L3: r3 = COPY r1
1462 // L4: r1 = COPY r3
1463 // L5: r3 = COPY r2
1464 // Such a COPY is found and it is L4, which belongs to an existing
1465 // chain; we add L2 and L5 to this chain.
1466 LLVM_DEBUG(dbgs() << "MCP: Found spill: ");
1467 LLVM_DEBUG(MaybeSpill->dump());
1468 MachineInstr *MaybePrevReload =
1469 Tracker.findLastSeenUseInCopy(Def.asMCReg(), *TRI);
1470 auto Leader = ChainLeader.find(MaybePrevReload);
1471 MachineInstr *L = nullptr;
1472 if (Leader == ChainLeader.end() ||
1473 (MaybePrevReload && !IsChainedCopy(*MaybePrevReload, MI))) {
1474 L = &MI;
1475 assert(!SpillChain.count(L) &&
1476 "SpillChain should not have contained newly found chain");
1477 } else {
1478 assert(MaybePrevReload &&
1479 "Found a valid leader through nullptr should not happen");
1480 L = Leader->second;
1481 assert(SpillChain[L].size() > 0 &&
1482 "Existing chain's length should be larger than zero");
1483 }
1484 assert(!ChainLeader.count(&MI) && !ChainLeader.count(MaybeSpill) &&
1485 "Newly found paired spill-reload should not belong to any chain "
1486 "at this point");
1487 ChainLeader.insert({MaybeSpill, L});
1488 ChainLeader.insert({&MI, L});
1489 SpillChain[L].push_back(MaybeSpill);
1490 ReloadChain[L].push_back(&MI);
1491 LLVM_DEBUG(dbgs() << "MCP: Chain " << L << " now is:\n");
1492 LLVM_DEBUG(printSpillReloadChain(SpillChain, ReloadChain, L));
1493 } else if (MaybeSpill && !MaybeSpillIsChained) {
1494 // MaybeSpill can't be paired with MI; that is, adding MI makes
1495 // the chain invalid.
1496 // The COPY that defines Src is no longer considered a candidate for a
1497 // valid chain, since we expect that the Def of a spill copy isn't used
1498 // by any COPY instruction until the paired reload copy. For example:
1499 // L1: r1 = COPY r2
1500 // L2: r3 = COPY r1
1501 // If we later have
1502 // L1: r1 = COPY r2
1503 // L2: r3 = COPY r1
1504 // L3: r2 = COPY r1
1505 // L1 and L3 can't be a valid spill-reload pair.
1506 // Thus we keep the property#1.
1507 LLVM_DEBUG(dbgs() << "MCP: Not paired spill-reload:\n");
1508 LLVM_DEBUG(MaybeSpill->dump());
1509 LLVM_DEBUG(MI.dump());
1510 Tracker.clobberRegister(Src.asMCReg(), *TRI, *TII, UseCopyInstr);
1511 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Src, TRI)
1512 << "\n");
1513 }
1514 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1515 }
1516
1517 for (auto I = SpillChain.begin(), E = SpillChain.end(); I != E; ++I) {
1518 auto &SC = I->second;
1519 assert(ReloadChain.count(I->first) &&
1520 "Reload chain of the same leader should exist");
1521 auto &RC = ReloadChain[I->first];
1522 TryFoldSpillageCopies(SC, RC);
1523 }
1524
1525 MaybeDeadCopies.clear();
1526 CopyDbgUsers.clear();
1527 Tracker.clear();
1528}
1529
1530bool MachineCopyPropagation::runOnMachineFunction(MachineFunction &MF) {
1531 if (skipFunction(MF.getFunction()))
1532 return false;
1533
1534 bool isSpillageCopyElimEnabled = false;
1535 switch (EnableSpillageCopyElimination) {
1536 case cl::BOU_UNSET:
1537 isSpillageCopyElimEnabled =
1538 MF.getSubtarget().enableSpillageCopyElimination();
1539 break;
1540 case cl::BOU_TRUE:
1541 isSpillageCopyElimEnabled = true;
1542 break;
1543 case cl::BOU_FALSE:
1544 isSpillageCopyElimEnabled = false;
1545 break;
1546 }
1547
1548 Changed = false;
1549
1550 TRI = MF.getSubtarget().getRegisterInfo();
1551 TII = MF.getSubtarget().getInstrInfo();
1552 MRI = &MF.getRegInfo();
1553
1554 for (MachineBasicBlock &MBB : MF) {
1555 if (isSpillageCopyElimEnabled)
1556 EliminateSpillageCopies(MBB);
1557 BackwardCopyPropagateBlock(MBB);
1558 ForwardCopyPropagateBlock(MBB);
1559 }
1560
1561 return Changed;
1562}
1563
1564MachineFunctionPass *
1565llvm::createMachineCopyPropagationPass(bool UseCopyInstr = false) {
1566 return new MachineCopyPropagation(UseCopyInstr);
1567}
Definition: SmallVector.h:573
void push_back(const T &Elt)
Definition: SmallVector.h:413
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Definition: SmallVector.h:1196
TargetInstrInfo - Interface to description of machine instruction set.
TargetRegisterInfo base class - We assume that the target defines a static array of TargetRegisterDes...
virtual const TargetRegisterInfo * getRegisterInfo() const
getRegisterInfo - If register information is available, return it.
virtual bool enableSpillageCopyElimination() const
Enable spillage copy elimination in MachineCopyPropagation pass.
virtual const TargetInstrInfo * getInstrInfo() const
A Use represents the edge between a Value definition and its users.
Definition: Use.h:43
std::pair< iterator, bool > insert(const ValueT &V)
Definition: DenseSet.h:213
size_type count(const_arg_type_t< ValueT > V) const
Return 1 if the specified key is in the set, 0 otherwise.
Definition: DenseSet.h:95
reverse_self_iterator getReverseIterator()
Definition: ilist_node.h:135
self_iterator getIterator()
Definition: ilist_node.h:132
This provides a very simple, boring adaptor for a begin and end iterator into a range type.
DebugType
Definition: COFF.h:699
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
Definition: CallingConv.h:24
@ SC
CHAIN = SC CHAIN, Imm128 - System call.
Reg
All possible values of the reg field in the ModR/M byte.
initializer< Ty > init(const Ty &Val)
Definition: CommandLine.h:443
NodeAddr< DefNode * > Def
Definition: RDFGraph.h:384
const_iterator begin(StringRef path, Style style=Style::native)
Get begin iterator over path.
Definition: Path.cpp:226
const_iterator end(StringRef path)
Get end iterator over path.
Definition: Path.cpp:235
This is an optimization pass for GlobalISel generic memory operations.
Definition: AddressRanges.h:18
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
Definition: STLExtras.h:329
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
Definition: STLExtras.h:1697
iterator_range< T > make_range(T x, T y)
Convenience function for iterating over sub-ranges.
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting i...
Definition: STLExtras.h:657
auto reverse(ContainerTy &&C)
Definition: STLExtras.h:420
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
Definition: Debug.cpp:163
auto drop_end(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the last N elements excluded.
Definition: STLExtras.h:336
bool is_contained(R &&Range, const E &Element)
Returns true if Element is found in Range.
Definition: STLExtras.h:1903
MachineFunctionPass * createMachineCopyPropagationPass(bool UseCopyInstr)
void initializeMachineCopyPropagationPass(PassRegistry &)
Printable printReg(Register Reg, const TargetRegisterInfo *TRI=nullptr, unsigned SubIdx=0, const MachineRegisterInfo *MRI=nullptr)
Prints virtual and physical registers with or without a TRI instance.
char & MachineCopyPropagationID
MachineCopyPropagation - This pass performs copy propagation on machine instructions.
const MachineOperand * Source
const MachineOperand * Destination