LLVM 18.0.0git
MachineCopyPropagation.cpp
1//===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass ---------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This is an extremely simple MachineInstr-level copy propagation pass.
10//
11// This pass forwards the source of COPYs to the users of their destinations
12// when doing so is legal. For example:
13//
14// %reg1 = COPY %reg0
15// ...
16// ... = OP %reg1
17//
18// If
19// - %reg0 has not been clobbered by the time of the use of %reg1
20// - the register class constraints are satisfied
21// - the COPY def is the only value that reaches OP
22// then this pass replaces the above with:
23//
24// %reg1 = COPY %reg0
25// ...
26// ... = OP %reg0
27//
28// This pass also removes some redundant COPYs. For example:
29//
30// %R1 = COPY %R0
31// ... // No clobber of %R1
32// %R0 = COPY %R1 <<< Removed
33//
34// or
35//
36// %R1 = COPY %R0
37// ... // No clobber of %R0
38// %R1 = COPY %R0 <<< Removed
39//
40// or
41//
42// $R0 = OP ...
43// ... // No read/clobber of $R0 and $R1
44// $R1 = COPY $R0 // $R0 is killed
45// Replace $R0 with $R1 and remove the COPY
46// $R1 = OP ...
47// ...
48//
49//===----------------------------------------------------------------------===//
50
51#include "llvm/ADT/DenseMap.h"
52#include "llvm/ADT/STLExtras.h"
53#include "llvm/ADT/SetVector.h"
54#include "llvm/ADT/SmallSet.h"
55#include "llvm/ADT/SmallVector.h"
56#include "llvm/ADT/Statistic.h"
57#include "llvm/ADT/iterator_range.h"
58#include "llvm/CodeGen/MachineBasicBlock.h"
59#include "llvm/CodeGen/MachineFunction.h"
60#include "llvm/CodeGen/MachineFunctionPass.h"
61#include "llvm/CodeGen/MachineInstr.h"
62#include "llvm/CodeGen/MachineOperand.h"
63#include "llvm/CodeGen/MachineRegisterInfo.h"
64#include "llvm/CodeGen/TargetInstrInfo.h"
65#include "llvm/CodeGen/TargetRegisterInfo.h"
66#include "llvm/CodeGen/TargetSubtargetInfo.h"
67#include "llvm/InitializePasses.h"
68#include "llvm/MC/MCRegisterInfo.h"
69#include "llvm/Pass.h"
70#include "llvm/Support/Debug.h"
71#include "llvm/Support/DebugCounter.h"
72#include "llvm/Support/raw_ostream.h"
73#include <cassert>
74#include <iterator>
75
76using namespace llvm;
77
78#define DEBUG_TYPE "machine-cp"
79
80STATISTIC(NumDeletes, "Number of dead copies deleted");
81STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
82STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
83STATISTIC(SpillageChainsLength, "Length of spillage chains");
84STATISTIC(NumSpillageChains, "Number of spillage chains");
85DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
86 "Controls which register COPYs are forwarded");
87
88static cl::opt<bool> MCPUseCopyInstr("mcp-use-is-copy-instr", cl::init(false),
89 cl::Hidden);
90static cl::opt<cl::boolOrDefault>
91 EnableSpillageCopyElimination("enable-spill-copy-elim", cl::Hidden);
92
93namespace {
94
95static std::optional<DestSourcePair> isCopyInstr(const MachineInstr &MI,
96 const TargetInstrInfo &TII,
97 bool UseCopyInstr) {
98 if (UseCopyInstr)
99 return TII.isCopyInstr(MI);
100
101 if (MI.isCopy())
102 return std::optional<DestSourcePair>(
103 DestSourcePair{MI.getOperand(0), MI.getOperand(1)});
104
105 return std::nullopt;
106}
107
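/// Tracks COPY instructions whose values are still available, so that later
/// instructions can reuse or forward them. Information is keyed by register
/// unit so that overlapping registers invalidate each other correctly.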
108class CopyTracker {
109 struct CopyInfo {
110 MachineInstr *MI, *LastSeenUseInCopy;
111 SmallVector<MCRegister, 4> DefRegs;
112 bool Avail;
113 };
114
115 DenseMap<MCRegUnit, CopyInfo> Copies;
116
117public:
118 /// Mark all of the given registers and their subregisters as unavailable for
119 /// copying.
120 void markRegsUnavailable(ArrayRef<MCRegister> Regs,
121 const TargetRegisterInfo &TRI) {
122 for (MCRegister Reg : Regs) {
123 // Source of copy is no longer available for propagation.
124 for (MCRegUnit Unit : TRI.regunits(Reg)) {
125 auto CI = Copies.find(Unit);
126 if (CI != Copies.end())
127 CI->second.Avail = false;
128 }
129 }
130 }
131
132 /// Remove register from copy maps.
133 void invalidateRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
134 const TargetInstrInfo &TII, bool UseCopyInstr) {
135 // Since Reg might be a subreg of some registers, only invalidating Reg is
136 // not enough. We have to find the COPY that defines Reg or the registers
137 // defined by Reg and invalidate all of them. Similarly, we must invalidate
138 // all of the subregisters used in the source of the COPY.
139 SmallSet<MCRegUnit, 8> RegUnitsToInvalidate;
140 auto InvalidateCopy = [&](MachineInstr *MI) {
141 std::optional<DestSourcePair> CopyOperands =
142 isCopyInstr(*MI, TII, UseCopyInstr);
143 assert(CopyOperands && "Expect copy");
144
145 auto Dest = TRI.regunits(CopyOperands->Destination->getReg().asMCReg());
146 auto Src = TRI.regunits(CopyOperands->Source->getReg().asMCReg());
147 RegUnitsToInvalidate.insert(Dest.begin(), Dest.end());
148 RegUnitsToInvalidate.insert(Src.begin(), Src.end());
149 };
150
151 for (MCRegUnit Unit : TRI.regunits(Reg)) {
152 auto I = Copies.find(Unit);
153 if (I != Copies.end()) {
154 if (MachineInstr *MI = I->second.MI)
155 InvalidateCopy(MI);
156 if (MachineInstr *MI = I->second.LastSeenUseInCopy)
157 InvalidateCopy(MI);
158 }
159 }
160 for (MCRegUnit Unit : RegUnitsToInvalidate)
161 Copies.erase(Unit);
162 }
163
164 /// Clobber a single register, removing it from the tracker's copy maps.
165 void clobberRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
166 const TargetInstrInfo &TII, bool UseCopyInstr) {
167 for (MCRegUnit Unit : TRI.regunits(Reg)) {
168 auto I = Copies.find(Unit);
169 if (I != Copies.end()) {
170 // When we clobber the source of a copy, we need to clobber everything
171 // it defined.
172 markRegsUnavailable(I->second.DefRegs, TRI);
173 // When we clobber the destination of a copy, we need to clobber the
174 // whole register it defined.
175 if (MachineInstr *MI = I->second.MI) {
176 std::optional<DestSourcePair> CopyOperands =
177 isCopyInstr(*MI, TII, UseCopyInstr);
178 markRegsUnavailable({CopyOperands->Destination->getReg().asMCReg()},
179 TRI);
180 }
181 // Now we can erase the copy.
182 Copies.erase(I);
183 }
184 }
185 }
186
187 /// Add this copy's registers into the tracker's copy maps.
188 void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI,
189 const TargetInstrInfo &TII, bool UseCopyInstr) {
190 std::optional<DestSourcePair> CopyOperands =
191 isCopyInstr(*MI, TII, UseCopyInstr);
192 assert(CopyOperands && "Tracking non-copy?");
193
194 MCRegister Src = CopyOperands->Source->getReg().asMCReg();
195 MCRegister Def = CopyOperands->Destination->getReg().asMCReg();
196
197 // Remember Def is defined by the copy.
198 for (MCRegUnit Unit : TRI.regunits(Def))
199 Copies[Unit] = {MI, nullptr, {}, true};
200
201 // Remember the source that's copied to Def. Once it's clobbered, it's no
202 // longer available for copy propagation.
203 for (MCRegUnit Unit : TRI.regunits(Src)) {
204 auto I = Copies.insert({Unit, {nullptr, nullptr, {}, false}});
205 auto &Copy = I.first->second;
206 if (!is_contained(Copy.DefRegs, Def))
207 Copy.DefRegs.push_back(Def);
208 Copy.LastSeenUseInCopy = MI;
209 }
210 }
211
212 bool hasAnyCopies() {
213 return !Copies.empty();
214 }
215
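/// Return the tracked COPY that defines \p RegUnit, if any. If
/// \p MustBeAvailable is set, only return it while the copy's source has not
/// been clobbered.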
216 MachineInstr *findCopyForUnit(MCRegister RegUnit,
217 const TargetRegisterInfo &TRI,
218 bool MustBeAvailable = false) {
219 auto CI = Copies.find(RegUnit);
220 if (CI == Copies.end())
221 return nullptr;
222 if (MustBeAvailable && !CI->second.Avail)
223 return nullptr;
224 return CI->second.MI;
225 }
226
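/// If exactly one tracked COPY uses \p RegUnit as its source, return that
/// COPY (looked up through the register it defines), provided it is still
/// available.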
227 MachineInstr *findCopyDefViaUnit(MCRegister RegUnit,
228 const TargetRegisterInfo &TRI) {
229 auto CI = Copies.find(RegUnit);
230 if (CI == Copies.end())
231 return nullptr;
232 if (CI->second.DefRegs.size() != 1)
233 return nullptr;
234 MCRegUnit RU = *TRI.regunits(CI->second.DefRegs[0]).begin();
235 return findCopyForUnit(RU, TRI, true);
236 }
237
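/// Find a tracked COPY whose source register covers \p Reg and that is not
/// separated from \p I by a regmask operand clobbering either of its
/// registers.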
238 MachineInstr *findAvailBackwardCopy(MachineInstr &I, MCRegister Reg,
239 const TargetRegisterInfo &TRI,
240 const TargetInstrInfo &TII,
241 bool UseCopyInstr) {
242 MCRegUnit RU = *TRI.regunits(Reg).begin();
243 MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);
244
245 if (!AvailCopy)
246 return nullptr;
247
248 std::optional<DestSourcePair> CopyOperands =
249 isCopyInstr(*AvailCopy, TII, UseCopyInstr);
250 Register AvailSrc = CopyOperands->Source->getReg();
251 Register AvailDef = CopyOperands->Destination->getReg();
252 if (!TRI.isSubRegisterEq(AvailSrc, Reg))
253 return nullptr;
254
255 for (const MachineInstr &MI :
256 make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
257 for (const MachineOperand &MO : MI.operands())
258 if (MO.isRegMask())
259 // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDef?
260 if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
261 return nullptr;
262
263 return AvailCopy;
264 }
265
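/// Find an available tracked COPY whose destination register covers \p Reg
/// and that is not separated from \p DestCopy by a regmask operand clobbering
/// either of its registers.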
266 MachineInstr *findAvailCopy(MachineInstr &DestCopy, MCRegister Reg,
267 const TargetRegisterInfo &TRI,
268 const TargetInstrInfo &TII, bool UseCopyInstr) {
269 // We check the first RegUnit here, since we'll only be interested in the
270 // copy if it copies the entire register anyway.
271 MCRegUnit RU = *TRI.regunits(Reg).begin();
272 MachineInstr *AvailCopy =
273 findCopyForUnit(RU, TRI, /*MustBeAvailable=*/true);
274
275 if (!AvailCopy)
276 return nullptr;
277
278 std::optional<DestSourcePair> CopyOperands =
279 isCopyInstr(*AvailCopy, TII, UseCopyInstr);
280 Register AvailSrc = CopyOperands->Source->getReg();
281 Register AvailDef = CopyOperands->Destination->getReg();
282 if (!TRI.isSubRegisterEq(AvailDef, Reg))
283 return nullptr;
284
285 // Check that the available copy isn't clobbered by any regmasks between
286 // itself and the destination.
287 for (const MachineInstr &MI :
288 make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
289 for (const MachineOperand &MO : MI.operands())
290 if (MO.isRegMask())
291 if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
292 return nullptr;
293
294 return AvailCopy;
295 }
296
297 // Find last COPY that defines Reg before Current MachineInstr.
298 MachineInstr *findLastSeenDefInCopy(const MachineInstr &Current,
299 MCRegister Reg,
300 const TargetRegisterInfo &TRI,
301 const TargetInstrInfo &TII,
302 bool UseCopyInstr) {
303 MCRegUnit RU = *TRI.regunits(Reg).begin();
304 auto CI = Copies.find(RU);
305 if (CI == Copies.end() || !CI->second.Avail)
306 return nullptr;
307
308 MachineInstr *DefCopy = CI->second.MI;
309 std::optional<DestSourcePair> CopyOperands =
310 isCopyInstr(*DefCopy, TII, UseCopyInstr);
311 Register Def = CopyOperands->Destination->getReg();
312 if (!TRI.isSubRegisterEq(Def, Reg))
313 return nullptr;
314
315 for (const MachineInstr &MI :
316 make_range(static_cast<const MachineInstr *>(DefCopy)->getIterator(),
317 Current.getIterator()))
318 for (const MachineOperand &MO : MI.operands())
319 if (MO.isRegMask())
320 if (MO.clobbersPhysReg(Def)) {
321 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of "
322 << printReg(Def, &TRI) << "\n");
323 return nullptr;
324 }
325
326 return DefCopy;
327 }
328
329 // Find last COPY that uses Reg.
330 MachineInstr *findLastSeenUseInCopy(MCRegister Reg,
331 const TargetRegisterInfo &TRI) {
332 MCRegUnit RU = *TRI.regunits(Reg).begin();
333 auto CI = Copies.find(RU);
334 if (CI == Copies.end())
335 return nullptr;
336 return CI->second.LastSeenUseInCopy;
337 }
338
339 void clear() {
340 Copies.clear();
341 }
342};
343
344class MachineCopyPropagation : public MachineFunctionPass {
345 const TargetRegisterInfo *TRI = nullptr;
346 const TargetInstrInfo *TII = nullptr;
347 const MachineRegisterInfo *MRI = nullptr;
348
349 // If true, use TargetInstrInfo::isCopyInstr to identify copy instructions.
350 bool UseCopyInstr;
351
352public:
353 static char ID; // Pass identification, replacement for typeid
354
355 MachineCopyPropagation(bool CopyInstr = false)
356 : MachineFunctionPass(ID), UseCopyInstr(CopyInstr || MCPUseCopyInstr) {
357 initializeMachineCopyPropagationPass(*PassRegistry::getPassRegistry());
358 }
359
360 void getAnalysisUsage(AnalysisUsage &AU) const override {
361 AU.setPreservesCFG();
362 MachineFunctionPass::getAnalysisUsage(AU);
363 }
364
365 bool runOnMachineFunction(MachineFunction &MF) override;
366
367 MachineFunctionProperties getRequiredProperties() const override {
368 return MachineFunctionProperties().set(
369 MachineFunctionProperties::Property::NoVRegs);
370 }
371
372private:
373 typedef enum { DebugUse = false, RegularUse = true } DebugType;
374
375 void ReadRegister(MCRegister Reg, MachineInstr &Reader, DebugType DT);
376 void ForwardCopyPropagateBlock(MachineBasicBlock &MBB);
377 void BackwardCopyPropagateBlock(MachineBasicBlock &MBB);
378 void EliminateSpillageCopies(MachineBasicBlock &MBB);
379 bool eraseIfRedundant(MachineInstr &Copy, MCRegister Src, MCRegister Def);
380 void forwardUses(MachineInstr &MI);
381 void propagateDefs(MachineInstr &MI);
382 bool isForwardableRegClassCopy(const MachineInstr &Copy,
383 const MachineInstr &UseI, unsigned UseIdx);
384 bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
385 const MachineInstr &UseI,
386 unsigned UseIdx);
387 bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);
388 bool hasOverlappingMultipleDef(const MachineInstr &MI,
389 const MachineOperand &MODef, Register Def);
390
391 /// Candidates for deletion.
392 SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;
393
394 /// Multimap tracking debug users in current BB
395 DenseMap<MachineInstr *, SmallSet<MachineInstr *, 2>> CopyDbgUsers;
396
397 CopyTracker Tracker;
398
399 bool Changed = false;
400};
401
402} // end anonymous namespace
403
404char MachineCopyPropagation::ID = 0;
405
406char &llvm::MachineCopyPropagationID = MachineCopyPropagation::ID;
407
408INITIALIZE_PASS(MachineCopyPropagation, DEBUG_TYPE,
409 "Machine Copy Propagation Pass", false, false)
410
411void MachineCopyPropagation::ReadRegister(MCRegister Reg, MachineInstr &Reader,
412 DebugType DT) {
413 // If 'Reg' is defined by a copy, the copy is no longer a candidate
414 // for elimination. If a copy is "read" by a debug user, record the user
415 // for propagation.
416 for (MCRegUnit Unit : TRI->regunits(Reg)) {
417 if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI)) {
418 if (DT == RegularUse) {
419 LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
420 MaybeDeadCopies.remove(Copy);
421 } else {
422 CopyDbgUsers[Copy].insert(&Reader);
423 }
424 }
425 }
426}
427
428/// Return true if \p PreviousCopy did copy register \p Src to register \p Def.
429/// This fact may have been obscured by sub register usage or may not be true at
430/// all even though Src and Def are subregisters of the registers used in
431/// PreviousCopy. e.g.
432/// isNopCopy("ecx = COPY eax", AX, CX) == true
433/// isNopCopy("ecx = COPY eax", AH, CL) == false
434static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src,
435 MCRegister Def, const TargetRegisterInfo *TRI,
436 const TargetInstrInfo *TII, bool UseCopyInstr) {
437
438 std::optional<DestSourcePair> CopyOperands =
439 isCopyInstr(PreviousCopy, *TII, UseCopyInstr);
440 MCRegister PreviousSrc = CopyOperands->Source->getReg().asMCReg();
441 MCRegister PreviousDef = CopyOperands->Destination->getReg().asMCReg();
442 if (Src == PreviousSrc && Def == PreviousDef)
443 return true;
444 if (!TRI->isSubRegister(PreviousSrc, Src))
445 return false;
446 unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
447 return SubIdx == TRI->getSubRegIndex(PreviousDef, Def);
448}
449
450/// Remove instruction \p Copy if there exists a previous copy that copies the
451/// register \p Src to the register \p Def. This may happen indirectly through
452/// copies of the super registers.
453bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy,
454 MCRegister Src, MCRegister Def) {
455 // Avoid eliminating a copy from/to a reserved register as we cannot predict
456 // the value (Example: The sparc zero register is writable but stays zero).
457 if (MRI->isReserved(Src) || MRI->isReserved(Def))
458 return false;
459
460 // Search for an existing copy.
461 MachineInstr *PrevCopy =
462 Tracker.findAvailCopy(Copy, Def, *TRI, *TII, UseCopyInstr);
463 if (!PrevCopy)
464 return false;
465
466 auto PrevCopyOperands = isCopyInstr(*PrevCopy, *TII, UseCopyInstr);
467 // Check that the existing copy uses the correct sub registers.
468 if (PrevCopyOperands->Destination->isDead())
469 return false;
470 if (!isNopCopy(*PrevCopy, Src, Def, TRI, TII, UseCopyInstr))
471 return false;
472
473 LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());
474
475 // Copy was redundantly redefining either Src or Def. Remove earlier kill
476 // flags between Copy and PrevCopy because the value will be reused now.
477 std::optional<DestSourcePair> CopyOperands =
478 isCopyInstr(Copy, *TII, UseCopyInstr);
479 assert(CopyOperands);
480
481 Register CopyDef = CopyOperands->Destination->getReg();
482 assert(CopyDef == Src || CopyDef == Def);
483 for (MachineInstr &MI :
484 make_range(PrevCopy->getIterator(), Copy.getIterator()))
485 MI.clearRegisterKills(CopyDef, TRI);
486
487 // Clear undef flag from remaining copy if needed.
488 if (!CopyOperands->Source->isUndef()) {
489 PrevCopy->getOperand(PrevCopyOperands->Source->getOperandNo())
490 .setIsUndef(false);
491 }
492
493 Copy.eraseFromParent();
494 Changed = true;
495 ++NumDeletes;
496 return true;
497}
498
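/// Return true if the register class constraint on operand \p UseIdx of
/// \p UseI allows \p Copy's destination register to be used as the new def.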
499bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
500 const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
501 std::optional<DestSourcePair> CopyOperands =
502 isCopyInstr(Copy, *TII, UseCopyInstr);
503 Register Def = CopyOperands->Destination->getReg();
504
505 if (const TargetRegisterClass *URC =
506 UseI.getRegClassConstraint(UseIdx, TII, TRI))
507 return URC->contains(Def);
508
509 // We don't process further if UseI is a COPY, since forward copy propagation
510 // should handle that.
511 return false;
512}
513
514/// Decide whether we should forward the source of \param Copy to its use in
515/// \param UseI based on the physical register class constraints of the opcode
516/// and on avoiding the introduction of more cross-class COPYs.
517bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
518 const MachineInstr &UseI,
519 unsigned UseIdx) {
520 std::optional<DestSourcePair> CopyOperands =
521 isCopyInstr(Copy, *TII, UseCopyInstr);
522 Register CopySrcReg = CopyOperands->Source->getReg();
523
524 // If the new register meets the opcode register constraints, then allow
525 // forwarding.
526 if (const TargetRegisterClass *URC =
527 UseI.getRegClassConstraint(UseIdx, TII, TRI))
528 return URC->contains(CopySrcReg);
529
530 auto UseICopyOperands = isCopyInstr(UseI, *TII, UseCopyInstr);
531 if (!UseICopyOperands)
532 return false;
533
534 /// COPYs don't have register class constraints, so if the user instruction
535 /// is a COPY, we just try to avoid introducing additional cross-class
536 /// COPYs. For example:
537 ///
538 /// RegClassA = COPY RegClassB // Copy parameter
539 /// ...
540 /// RegClassB = COPY RegClassA // UseI parameter
541 ///
542 /// which after forwarding becomes
543 ///
544 /// RegClassA = COPY RegClassB
545 /// ...
546 /// RegClassB = COPY RegClassB
547 ///
548 /// so we have reduced the number of cross-class COPYs and potentially
549 /// introduced a nop COPY that can be removed.
550
551 // Allow forwarding if src and dst belong to any common class, so long as they
552 // don't belong to any (possibly smaller) common class that requires copies to
553 // go via a different class.
554 Register UseDstReg = UseICopyOperands->Destination->getReg();
555 bool Found = false;
556 bool IsCrossClass = false;
557 for (const TargetRegisterClass *RC : TRI->regclasses()) {
558 if (RC->contains(CopySrcReg) && RC->contains(UseDstReg)) {
559 Found = true;
560 if (TRI->getCrossCopyRegClass(RC) != RC) {
561 IsCrossClass = true;
562 break;
563 }
564 }
565 }
566 if (!Found)
567 return false;
568 if (!IsCrossClass)
569 return true;
570 // The forwarded copy would be cross-class. Only do this if the original copy
571 // was also cross-class.
572 Register CopyDstReg = CopyOperands->Destination->getReg();
573 for (const TargetRegisterClass *RC : TRI->regclasses()) {
574 if (RC->contains(CopySrcReg) && RC->contains(CopyDstReg) &&
575 TRI->getCrossCopyRegClass(RC) != RC)
576 return true;
577 }
578 return false;
579}
580
581/// Check that \p MI does not have implicit uses that overlap with its \p Use
582/// operand (the register being replaced), since these can sometimes be
583/// implicitly tied to other operands. For example, on AMDGPU:
584///
585/// V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
586///
587/// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
588/// way of knowing we need to update the latter when updating the former.
589bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
590 const MachineOperand &Use) {
591 for (const MachineOperand &MIUse : MI.uses())
592 if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
593 MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
594 return true;
595
596 return false;
597}
598
599/// For an MI that has multiple definitions, check whether \p MI has
600/// a definition that overlaps with another of its definitions.
601/// For example, on ARM: umull r9, r9, lr, r0
602/// The umull instruction is unpredictable unless RdHi and RdLo are different.
603bool MachineCopyPropagation::hasOverlappingMultipleDef(
604 const MachineInstr &MI, const MachineOperand &MODef, Register Def) {
605 for (const MachineOperand &MIDef : MI.defs()) {
606 if ((&MIDef != &MODef) && MIDef.isReg() &&
607 TRI->regsOverlap(Def, MIDef.getReg()))
608 return true;
609 }
610
611 return false;
612}
613
614/// Look for available copies whose destination register is used by \p MI and
615/// replace the use in \p MI with the copy's source register.
616void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
617 if (!Tracker.hasAnyCopies())
618 return;
619
620 // Look for non-tied explicit uses of registers that are defined by an
621 // active COPY instruction and replace each such use with the source of
622 // that COPY.
623 for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
624 ++OpIdx) {
625 MachineOperand &MOUse = MI.getOperand(OpIdx);
626 // Don't forward into undef use operands, since doing so can cause problems
627 // with the machine verifier: it doesn't treat undef reads as reads, so we
628 // can end up with a live range that ends on an undef read, which the
629 // verifier reports as a live range that doesn't end on a read of the live
630 // range register.
631 if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
632 MOUse.isImplicit())
633 continue;
634
635 if (!MOUse.getReg())
636 continue;
637
638 // Check that the register is marked 'renamable' so we know it is safe to
639 // rename it without violating any constraints that aren't expressed in the
640 // IR (e.g. ABI or opcode requirements).
641 if (!MOUse.isRenamable())
642 continue;
643
644 MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg().asMCReg(),
645 *TRI, *TII, UseCopyInstr);
646 if (!Copy)
647 continue;
648
649 std::optional<DestSourcePair> CopyOperands =
650 isCopyInstr(*Copy, *TII, UseCopyInstr);
651 Register CopyDstReg = CopyOperands->Destination->getReg();
652 const MachineOperand &CopySrc = *CopyOperands->Source;
653 Register CopySrcReg = CopySrc.getReg();
654
655 Register ForwardedReg = CopySrcReg;
656 // MI might use a sub-register of the Copy destination, in which case the
657 // forwarded register is the matching sub-register of the Copy source.
658 if (MOUse.getReg() != CopyDstReg) {
659 unsigned SubRegIdx = TRI->getSubRegIndex(CopyDstReg, MOUse.getReg());
660 assert(SubRegIdx &&
661 "MI source is not a sub-register of Copy destination");
662 ForwardedReg = TRI->getSubReg(CopySrcReg, SubRegIdx);
663 if (!ForwardedReg) {
664 LLVM_DEBUG(dbgs() << "MCP: Copy source does not have sub-register "
665 << TRI->getSubRegIndexName(SubRegIdx) << '\n');
666 continue;
667 }
668 }
669
670 // Don't forward COPYs of reserved regs unless they are constant.
671 if (MRI->isReserved(CopySrcReg) && !MRI->isConstantPhysReg(CopySrcReg))
672 continue;
673
674 if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
675 continue;
676
677 if (hasImplicitOverlap(MI, MOUse))
678 continue;
679
680 // Check that the instruction is not a copy that partially overwrites the
681 // original copy source that we are about to use. The tracker mechanism
682 // cannot cope with that.
683 if (isCopyInstr(MI, *TII, UseCopyInstr) &&
684 MI.modifiesRegister(CopySrcReg, TRI) &&
685 !MI.definesRegister(CopySrcReg)) {
686 LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
687 continue;
688 }
689
690 if (!DebugCounter::shouldExecute(FwdCounter)) {
691 LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n "
692 << MI);
693 continue;
694 }
695
696 LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
697 << "\n with " << printReg(ForwardedReg, TRI)
698 << "\n in " << MI << " from " << *Copy);
699
700 MOUse.setReg(ForwardedReg);
701
702 if (!CopySrc.isRenamable())
703 MOUse.setIsRenamable(false);
704 MOUse.setIsUndef(CopySrc.isUndef());
705
706 LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
707
708 // Clear kill markers that may have been invalidated.
709 for (MachineInstr &KMI :
710 make_range(Copy->getIterator(), std::next(MI.getIterator())))
711 KMI.clearRegisterKills(CopySrcReg, TRI);
712
713 ++NumCopyForwards;
714 Changed = true;
715 }
716}
717
718void MachineCopyPropagation::ForwardCopyPropagateBlock(MachineBasicBlock &MBB) {
719 LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
720 << "\n");
721
722 for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
723 // Analyze copies (which don't overlap themselves).
724 std::optional<DestSourcePair> CopyOperands =
725 isCopyInstr(MI, *TII, UseCopyInstr);
726 if (CopyOperands) {
727
728 Register RegSrc = CopyOperands->Source->getReg();
729 Register RegDef = CopyOperands->Destination->getReg();
730
731 if (!TRI->regsOverlap(RegDef, RegSrc)) {
732 assert(RegDef.isPhysical() && RegSrc.isPhysical() &&
733 "MachineCopyPropagation should be run after register allocation!");
734
735 MCRegister Def = RegDef.asMCReg();
736 MCRegister Src = RegSrc.asMCReg();
737
738 // If the two copies cancel out and the source of the first copy
739 // hasn't been overridden, eliminate the second one. e.g.
740 // %ecx = COPY %eax
741 // ... nothing clobbered eax.
742 // %eax = COPY %ecx
743 // =>
744 // %ecx = COPY %eax
745 //
746 // or
747 //
748 // %ecx = COPY %eax
749 // ... nothing clobbered eax.
750 // %ecx = COPY %eax
751 // =>
752 // %ecx = COPY %eax
753 if (eraseIfRedundant(MI, Def, Src) || eraseIfRedundant(MI, Src, Def))
754 continue;
755
756 forwardUses(MI);
757
758 // Src may have been changed by forwardUses()
759 CopyOperands = isCopyInstr(MI, *TII, UseCopyInstr);
760 Src = CopyOperands->Source->getReg().asMCReg();
761
762 // If Src is defined by a previous copy, the previous copy cannot be
763 // eliminated.
764 ReadRegister(Src, MI, RegularUse);
765 for (const MachineOperand &MO : MI.implicit_operands()) {
766 if (!MO.isReg() || !MO.readsReg())
767 continue;
768 MCRegister Reg = MO.getReg().asMCReg();
769 if (!Reg)
770 continue;
771 ReadRegister(Reg, MI, RegularUse);
772 }
773
774 LLVM_DEBUG(dbgs() << "MCP: Copy is a deletion candidate: "; MI.dump());
775
776 // Copy is now a candidate for deletion.
777 if (!MRI->isReserved(Def))
778 MaybeDeadCopies.insert(&MI);
779
780 // If 'Def' was previously the source of another copy, then that earlier
781 // copy's source is no longer available. e.g.
782 // %xmm9 = copy %xmm2
783 // ...
784 // %xmm2 = copy %xmm0
785 // ...
786 // %xmm2 = copy %xmm9
787 Tracker.clobberRegister(Def, *TRI, *TII, UseCopyInstr);
788 for (const MachineOperand &MO : MI.implicit_operands()) {
789 if (!MO.isReg() || !MO.isDef())
790 continue;
791 MCRegister Reg = MO.getReg().asMCReg();
792 if (!Reg)
793 continue;
794 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
795 }
796
797 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
798
799 continue;
800 }
801 }
802
803 // Clobber any earlyclobber regs first.
804 for (const MachineOperand &MO : MI.operands())
805 if (MO.isReg() && MO.isEarlyClobber()) {
806 MCRegister Reg = MO.getReg().asMCReg();
807 // If we have a tied earlyclobber, that means it is also read by this
808 // instruction, so we need to make sure we don't remove it as dead
809 // later.
810 if (MO.isTied())
811 ReadRegister(Reg, MI, RegularUse);
812 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
813 }
814
815 forwardUses(MI);
816
817 // Not a copy.
818 SmallVector<MCRegister, 4> Defs;
819 const MachineOperand *RegMask = nullptr;
820 for (const MachineOperand &MO : MI.operands()) {
821 if (MO.isRegMask())
822 RegMask = &MO;
823 if (!MO.isReg())
824 continue;
825 Register Reg = MO.getReg();
826 if (!Reg)
827 continue;
828
829 assert(!Reg.isVirtual() &&
830 "MachineCopyPropagation should be run after register allocation!");
831
832 if (MO.isDef() && !MO.isEarlyClobber()) {
833 Defs.push_back(Reg.asMCReg());
834 continue;
835 } else if (MO.readsReg())
836 ReadRegister(Reg.asMCReg(), MI, MO.isDebug() ? DebugUse : RegularUse);
837 }
838
839 // The instruction has a register mask operand which means that it clobbers
840 // a large set of registers. Treat clobbered registers the same way as
841 // defined registers.
842 if (RegMask) {
843 // Erase any MaybeDeadCopies whose destination register is clobbered.
844 for (SmallSetVector<MachineInstr *, 8>::iterator DI =
845 MaybeDeadCopies.begin();
846 DI != MaybeDeadCopies.end();) {
847 MachineInstr *MaybeDead = *DI;
848 std::optional<DestSourcePair> CopyOperands =
849 isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
850 MCRegister Reg = CopyOperands->Destination->getReg().asMCReg();
851 assert(!MRI->isReserved(Reg));
852
853 if (!RegMask->clobbersPhysReg(Reg)) {
854 ++DI;
855 continue;
856 }
857
858 LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: ";
859 MaybeDead->dump());
860
861 // Make sure we invalidate any entries in the copy maps before erasing
862 // the instruction.
863 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
864
865 // erase() will return the next valid iterator pointing to the next
866 // element after the erased one.
867 DI = MaybeDeadCopies.erase(DI);
868 MaybeDead->eraseFromParent();
869 Changed = true;
870 ++NumDeletes;
871 }
872 }
873
874 // Any previously tracked copy that defines or reads the Defs is no longer available.
875 for (MCRegister Reg : Defs)
876 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
877 }
878
879 // If MBB doesn't have successors, delete the copies whose defs are not used.
880 // If MBB does have successors, then conservatively assume the defs are
881 // live-out since we don't want to trust live-in lists.
882 if (MBB.succ_empty()) {
883 for (MachineInstr *MaybeDead : MaybeDeadCopies) {
884 LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
885 MaybeDead->dump());
886
887 std::optional<DestSourcePair> CopyOperands =
888 isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
889 assert(CopyOperands);
890
891 Register SrcReg = CopyOperands->Source->getReg();
892 Register DestReg = CopyOperands->Destination->getReg();
893 assert(!MRI->isReserved(DestReg));
894
895 // Update matching debug values, if any.
896 SmallVector<MachineInstr *> MaybeDeadDbgUsers(
897 CopyDbgUsers[MaybeDead].begin(), CopyDbgUsers[MaybeDead].end());
898 MRI->updateDbgUsersToReg(DestReg.asMCReg(), SrcReg.asMCReg(),
899 MaybeDeadDbgUsers);
900
901 MaybeDead->eraseFromParent();
902 Changed = true;
903 ++NumDeletes;
904 }
905 }
906
907 MaybeDeadCopies.clear();
908 CopyDbgUsers.clear();
909 Tracker.clear();
910}
911
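/// Return true if \p CopyOperands describes a copy that backward propagation
/// may rewrite: both registers are valid and non-reserved, and the source is
/// a renamable register that is killed by the copy.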
912static bool isBackwardPropagatableCopy(const DestSourcePair &CopyOperands,
913 const MachineRegisterInfo &MRI,
914 const TargetInstrInfo &TII) {
915 Register Def = CopyOperands.Destination->getReg();
916 Register Src = CopyOperands.Source->getReg();
917
918 if (!Def || !Src)
919 return false;
920
921 if (MRI.isReserved(Def) || MRI.isReserved(Src))
922 return false;
923
924 return CopyOperands.Source->isRenamable() && CopyOperands.Source->isKill();
925}
926
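/// For each register defined by \p MI, if an available backward-propagatable
/// COPY reads that register, rewrite \p MI to define the COPY's destination
/// directly; the COPY then becomes a deletion candidate.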
927void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
928 if (!Tracker.hasAnyCopies())
929 return;
930
931 for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
932 ++OpIdx) {
933 MachineOperand &MODef = MI.getOperand(OpIdx);
934
935 if (!MODef.isReg() || MODef.isUse())
936 continue;
937
938 // Ignore non-trivial cases.
939 if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
940 continue;
941
942 if (!MODef.getReg())
943 continue;
944
945 // Only handle defs that are marked renamable (i.e. originally from a vreg).
946 if (!MODef.isRenamable())
947 continue;
948
949 MachineInstr *Copy = Tracker.findAvailBackwardCopy(
950 MI, MODef.getReg().asMCReg(), *TRI, *TII, UseCopyInstr);
951 if (!Copy)
952 continue;
953
954 std::optional<DestSourcePair> CopyOperands =
955 isCopyInstr(*Copy, *TII, UseCopyInstr);
956 Register Def = CopyOperands->Destination->getReg();
957 Register Src = CopyOperands->Source->getReg();
958
959 if (MODef.getReg() != Src)
960 continue;
961
962 if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
963 continue;
964
965 if (hasImplicitOverlap(MI, MODef))
966 continue;
967
968 if (hasOverlappingMultipleDef(MI, MODef, Def))
969 continue;
970
971 LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
972 << "\n with " << printReg(Def, TRI) << "\n in "
973 << MI << " from " << *Copy);
974
975 MODef.setReg(Def);
976 MODef.setIsRenamable(CopyOperands->Destination->isRenamable());
977
978 LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
979 MaybeDeadCopies.insert(Copy);
980 Changed = true;
981 ++NumCopyBackwardPropagated;
982 }
983}
984
985void MachineCopyPropagation::BackwardCopyPropagateBlock(
986 MachineBasicBlock &MBB) {
987 LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
988 << "\n");
989
990 for (MachineInstr &MI : llvm::make_early_inc_range(llvm::reverse(MBB))) {
991 // Ignore non-trivial COPYs.
992 std::optional<DestSourcePair> CopyOperands =
993 isCopyInstr(MI, *TII, UseCopyInstr);
994 if (CopyOperands && MI.getNumOperands() == 2) {
995 Register DefReg = CopyOperands->Destination->getReg();
996 Register SrcReg = CopyOperands->Source->getReg();
997
998 if (!TRI->regsOverlap(DefReg, SrcReg)) {
999 // Unlike forward cp, we don't invoke propagateDefs here,
1000 // just let forward cp do COPY-to-COPY propagation.
1001 if (isBackwardPropagatableCopy(*CopyOperands, *MRI, *TII)) {
1002 Tracker.invalidateRegister(SrcReg.asMCReg(), *TRI, *TII,
1003 UseCopyInstr);
1004 Tracker.invalidateRegister(DefReg.asMCReg(), *TRI, *TII,
1005 UseCopyInstr);
1006 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1007 continue;
1008 }
1009 }
1010 }
1011
1012 // Invalidate any earlyclobber regs first.
1013 for (const MachineOperand &MO : MI.operands())
1014 if (MO.isReg() && MO.isEarlyClobber()) {
1015 MCRegister Reg = MO.getReg().asMCReg();
1016 if (!Reg)
1017 continue;
1018 Tracker.invalidateRegister(Reg, *TRI, *TII, UseCopyInstr);
1019 }
1020
1021 propagateDefs(MI);
1022 for (const MachineOperand &MO : MI.operands()) {
1023 if (!MO.isReg())
1024 continue;
1025
1026 if (!MO.getReg())
1027 continue;
1028
1029 if (MO.isDef())
1030 Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
1031 UseCopyInstr);
1032
1033 if (MO.readsReg()) {
1034 if (MO.isDebug()) {
1035 // Check if the register in the debug instruction is utilized
1036 // in a copy instruction, so we can update the debug info if the
1037 // register is changed.
1038 for (MCRegUnit Unit : TRI->regunits(MO.getReg().asMCReg())) {
1039 if (auto *Copy = Tracker.findCopyDefViaUnit(Unit, *TRI)) {
1040 CopyDbgUsers[Copy].insert(&MI);
1041 }
1042 }
1043 } else {
1044 Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
1045 UseCopyInstr);
1046 }
1047 }
1048 }
1049 }
1050
1051 for (auto *Copy : MaybeDeadCopies) {
1052 std::optional<DestSourcePair> CopyOperands =
1053 isCopyInstr(*Copy, *TII, UseCopyInstr);
1054 Register Src = CopyOperands->Source->getReg();
1055 Register Def = CopyOperands->Destination->getReg();
1056 SmallVector<MachineInstr *> MaybeDeadDbgUsers(CopyDbgUsers[Copy].begin(),
1057 CopyDbgUsers[Copy].end());
1058
1059 MRI->updateDbgUsersToReg(Src.asMCReg(), Def.asMCReg(), MaybeDeadDbgUsers);
1060 Copy->eraseFromParent();
1061 ++NumDeletes;
1062 }
1063
1064 MaybeDeadCopies.clear();
1065 CopyDbgUsers.clear();
1066 Tracker.clear();
1067}
1068
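// Debug helper: dump the spill COPYs followed by the reload COPYs of the
// spill-reload chain led by Leader.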
1069static void LLVM_ATTRIBUTE_UNUSED printSpillReloadChain(
1070 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &SpillChain,
1071 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &ReloadChain,
1072 MachineInstr *Leader) {
1073 auto &SC = SpillChain[Leader];
1074 auto &RC = ReloadChain[Leader];
1075 for (auto I = SC.rbegin(), E = SC.rend(); I != E; ++I)
1076 (*I)->dump();
1077 for (MachineInstr *MI : RC)
1078 MI->dump();
1079}
1080
1081// Remove spill-reload like copy chains. For example
1082// r0 = COPY r1
1083// r1 = COPY r2
1084// r2 = COPY r3
1085// r3 = COPY r4
1086// <def-use r4>
1087// r4 = COPY r3
1088// r3 = COPY r2
1089// r2 = COPY r1
1090// r1 = COPY r0
1091// will be folded into
1092// r0 = COPY r1
1093// r1 = COPY r4
1094// <def-use r4>
1095// r4 = COPY r1
1096// r1 = COPY r0
1097// TODO: Currently we don't track usage of r0 outside the chain, so we
1098// conservatively keep its value as it was before the rewrite.
1099//
1100// The algorithm is trying to keep
1101// property#1: No Def of spill COPY in the chain is used or defined until the
1102// paired reload COPY in the chain uses the Def.
1103//
1104// property#2: No Source of COPY in the chain is used or defined until the next
1105// COPY in the chain defines the Source, except the innermost spill-reload
1106// pair.
1107//
1108// The algorithm works by checking every COPY inside the MBB, assuming the
1109// COPY is a reload COPY, and then searching backward for the spill COPY that
1110// defines the Src of the reload COPY. If such a pair is found, it either
1111// joins an existing chain or starts a new chain, depending on whether the
1112// last available COPY uses the Def of the reload COPY.
1113// Implementation notes: we use CopyTracker::findLastSeenDefInCopy(...) to
1114// find the last COPY that defines a register, and
1115// CopyTracker::findLastSeenUseInCopy(...) to find the last COPY that uses it.
1116// When we encounter a non-COPY instruction, we check the registers in its
1117// operands; if such a register is defined by a tracked COPY, we untrack it
1118// via CopyTracker::clobberRegister(Reg, ...).
1119void MachineCopyPropagation::EliminateSpillageCopies(MachineBasicBlock &MBB) {
1120 // ChainLeader maps each MI inside a spill-reload chain to the chain's
1121 // innermost reload COPY, so we can tell if an MI already belongs to a chain.
1122 DenseMap<MachineInstr *, MachineInstr *> ChainLeader;
1123 // SpillChain maps the innermost reload COPY of a spill-reload chain to the
1124 // sequence of COPYs that forms the spills of that chain.
1125 // ReloadChain maps the innermost reload COPY of a spill-reload chain to the
1126 // sequence of COPYs that forms the reloads of that chain.
1127 DenseMap<MachineInstr *, SmallVector<MachineInstr *>> SpillChain, ReloadChain;
1128 // If a COPY's Source has a use or def before the next COPY defines the
1129 // Source, we put the COPY in this set to keep property#2.
1130 DenseSet<const MachineInstr *> CopySourceInvalid;
1131
1132 auto TryFoldSpillageCopies =
1133 [&, this](const SmallVectorImpl<MachineInstr *> &SC,
1134 const SmallVectorImpl<MachineInstr *> &RC) {
1135 assert(SC.size() == RC.size() && "Spill-reload should be paired");
1136
1137 // We need at least 3 pairs of copies for the transformation to apply,
1138 // because the first outermost pair cannot be removed since we don't
1139 // recolor outside of the chain, and we need at least one temporary
1140 // spill slot to shorten the chain. If we only have a chain of two
1141 // pairs, we already have the shortest sequence this code can handle:
1142 // the outermost pair for the temporary spill slot, and the pair that
1143 // uses that temporary spill slot for the other end of the chain.
1144 // TODO: We might be able to simplify to one spill-reload pair if we collect
1145 // more information about the outermost COPY.
1146 if (SC.size() <= 2)
1147 return;
1148
1149 // If property#2 is violated, we don't fold the chain.
1150 for (const MachineInstr *Spill : drop_begin(SC))
1151 if (CopySourceInvalid.count(Spill))
1152 return;
1153
1154 for (const MachineInstr *Reload : drop_end(RC))
1155 if (CopySourceInvalid.count(Reload))
1156 return;
1157
1158 auto CheckCopyConstraint = [this](Register Def, Register Src) {
1159 for (const TargetRegisterClass *RC : TRI->regclasses()) {
1160 if (RC->contains(Def) && RC->contains(Src))
1161 return true;
1162 }
1163 return false;
1164 };
1165
1166 auto UpdateReg = [](MachineInstr *MI, const MachineOperand *Old,
1167 const MachineOperand *New) {
1168 for (MachineOperand &MO : MI->operands()) {
1169 if (&MO == Old)
1170 MO.setReg(New->getReg());
1171 }
1172 };
1173
1174 std::optional<DestSourcePair> InnerMostSpillCopy =
1175 isCopyInstr(*SC[0], *TII, UseCopyInstr);
1176 std::optional<DestSourcePair> OuterMostSpillCopy =
1177 isCopyInstr(*SC.back(), *TII, UseCopyInstr);
1178 std::optional<DestSourcePair> InnerMostReloadCopy =
1179 isCopyInstr(*RC[0], *TII, UseCopyInstr);
1180 std::optional<DestSourcePair> OuterMostReloadCopy =
1181 isCopyInstr(*RC.back(), *TII, UseCopyInstr);
1182 if (!CheckCopyConstraint(OuterMostSpillCopy->Source->getReg(),
1183 InnerMostSpillCopy->Source->getReg()) ||
1184 !CheckCopyConstraint(InnerMostReloadCopy->Destination->getReg(),
1185 OuterMostReloadCopy->Destination->getReg()))
1186 return;
1187
1188 SpillageChainsLength += SC.size() + RC.size();
1189 NumSpillageChains += 1;
1190 UpdateReg(SC[0], InnerMostSpillCopy->Destination,
1191 OuterMostSpillCopy->Source);
1192 UpdateReg(RC[0], InnerMostReloadCopy->Source,
1193 OuterMostReloadCopy->Destination);
1194
1195 for (size_t I = 1; I < SC.size() - 1; ++I) {
1196 SC[I]->eraseFromParent();
1197 RC[I]->eraseFromParent();
1198 NumDeletes += 2;
1199 }
1200 };
1201
1202 auto IsFoldableCopy = [this](const MachineInstr &MaybeCopy) {
1203 if (MaybeCopy.getNumImplicitOperands() > 0)
1204 return false;
1205 std::optional<DestSourcePair> CopyOperands =
1206 isCopyInstr(MaybeCopy, *TII, UseCopyInstr);
1207 if (!CopyOperands)
1208 return false;
1209 Register Src = CopyOperands->Source->getReg();
1210 Register Def = CopyOperands->Destination->getReg();
1211 return Src && Def && !TRI->regsOverlap(Src, Def) &&
1212 CopyOperands->Source->isRenamable() &&
1213 CopyOperands->Destination->isRenamable();
1214 };
1215
1216 auto IsSpillReloadPair = [&, this](const MachineInstr &Spill,
1217 const MachineInstr &Reload) {
1218 if (!IsFoldableCopy(Spill) || !IsFoldableCopy(Reload))
1219 return false;
1220 std::optional<DestSourcePair> SpillCopy =
1221 isCopyInstr(Spill, *TII, UseCopyInstr);
1222 std::optional<DestSourcePair> ReloadCopy =
1223 isCopyInstr(Reload, *TII, UseCopyInstr);
1224 if (!SpillCopy || !ReloadCopy)
1225 return false;
1226 return SpillCopy->Source->getReg() == ReloadCopy->Destination->getReg() &&
1227 SpillCopy->Destination->getReg() == ReloadCopy->Source->getReg();
1228 };
1229
1230 auto IsChainedCopy = [&, this](const MachineInstr &Prev,
1231 const MachineInstr &Current) {
1232 if (!IsFoldableCopy(Prev) || !IsFoldableCopy(Current))
1233 return false;
1234 std::optional<DestSourcePair> PrevCopy =
1235 isCopyInstr(Prev, *TII, UseCopyInstr);
1236 std::optional<DestSourcePair> CurrentCopy =
1237 isCopyInstr(Current, *TII, UseCopyInstr);
1238 if (!PrevCopy || !CurrentCopy)
1239 return false;
1240 return PrevCopy->Source->getReg() == CurrentCopy->Destination->getReg();
1241 };
1242
1243 for (MachineInstr &MI : MBB) {
1244 std::optional<DestSourcePair> CopyOperands =
1245 isCopyInstr(MI, *TII, UseCopyInstr);
1246
1247 // Update track information via non-copy instruction.
1248 SmallSet<Register, 8> RegsToClobber;
1249 if (!CopyOperands) {
1250 for (const MachineOperand &MO : MI.operands()) {
1251 if (!MO.isReg())
1252 continue;
1253 Register Reg = MO.getReg();
1254 if (!Reg)
1255 continue;
1256 MachineInstr *LastUseCopy =
1257 Tracker.findLastSeenUseInCopy(Reg.asMCReg(), *TRI);
1258 if (LastUseCopy) {
1259 LLVM_DEBUG(dbgs() << "MCP: Copy source of\n");
1260 LLVM_DEBUG(LastUseCopy->dump());
1261 LLVM_DEBUG(dbgs() << "might be invalidated by\n");
1262 LLVM_DEBUG(MI.dump());
1263 CopySourceInvalid.insert(LastUseCopy);
1264 }
1265 // Note that Tracker.clobberRegister(Reg, ...) removes all tracking of
1266 // Reg, i.e., the COPY that defines Reg is removed from the mapping and
1267 // COPYs that use Reg are marked unavailable.
1268 // We don't invoke CopyTracker::clobberRegister(Reg, ...) if Reg is not
1269 // defined by a previous COPY, since we don't want to make COPYs that
1270 // use Reg unavailable.
1271 if (Tracker.findLastSeenDefInCopy(MI, Reg.asMCReg(), *TRI, *TII,
1272 UseCopyInstr))
1273 // Thus we can keep the property#1.
1274 RegsToClobber.insert(Reg);
1275 }
1276 for (Register Reg : RegsToClobber) {
1277 Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
1278 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Reg, TRI)
1279 << "\n");
1280 }
1281 continue;
1282 }
1283
1284 Register Src = CopyOperands->Source->getReg();
1285 Register Def = CopyOperands->Destination->getReg();
1286 // Check if we can find a pair spill-reload copy.
1287 LLVM_DEBUG(dbgs() << "MCP: Searching paired spill for reload: ");
1288 LLVM_DEBUG(MI.dump());
1289 MachineInstr *MaybeSpill =
1290 Tracker.findLastSeenDefInCopy(MI, Src.asMCReg(), *TRI, *TII, UseCopyInstr);
1291 bool MaybeSpillIsChained = ChainLeader.count(MaybeSpill);
1292 if (!MaybeSpillIsChained && MaybeSpill &&
1293 IsSpillReloadPair(*MaybeSpill, MI)) {
1294 // Check if we already have an existing chain. Now we have a
1295 // spill-reload pair.
1296 // L2: r2 = COPY r3
1297 // L5: r3 = COPY r2
1298 // We look for a valid COPY before L5 which uses r3.
1299 // There can be several cases.
1300 // Case #1:
1301 // No such COPY is found, which can happen if r3 is defined or used
1302 // between (L2, L5); we create a new chain for L2 and L5.
1303 // Case #2:
1304 // L2: r2 = COPY r3
1305 // L5: r3 = COPY r2
1306 // Such a COPY is found, and it is L2; we create a new chain for L2 and L5.
1307 // Case #3:
1308 // L2: r2 = COPY r3
1309 // L3: r1 = COPY r3
1310 // L5: r3 = COPY r2
1311 // we create a new chain for L2 and L5.
1312 // Case #4:
1313 // L2: r2 = COPY r3
1314 // L3: r1 = COPY r3
1315 // L4: r3 = COPY r1
1316 // L5: r3 = COPY r2
1317 // Such a COPY won't be found since L4 defines r3, so we create a new
1318 // chain for L2 and L5.
1319 // Case #5:
1320 // L2: r2 = COPY r3
1321 // L3: r3 = COPY r1
1322 // L4: r1 = COPY r3
1323 // L5: r3 = COPY r2
1324 // Such a COPY is found, and it is L4, which belongs to an existing
1325 // chain; we add L2 and L5 to this chain.
1326 LLVM_DEBUG(dbgs() << "MCP: Found spill: ");
1327 LLVM_DEBUG(MaybeSpill->dump());
1328 MachineInstr *MaybePrevReload =
1329 Tracker.findLastSeenUseInCopy(Def.asMCReg(), *TRI);
1330 auto Leader = ChainLeader.find(MaybePrevReload);
1331 MachineInstr *L = nullptr;
1332 if (Leader == ChainLeader.end() ||
1333 (MaybePrevReload && !IsChainedCopy(*MaybePrevReload, MI))) {
1334 L = &MI;
1335 assert(!SpillChain.count(L) &&
1336 "SpillChain should not have contained newly found chain");
1337 } else {
1338 assert(MaybePrevReload &&
1339 "Found a valid leader through nullptr should not happend");
1340 L = Leader->second;
1341 assert(SpillChain[L].size() > 0 &&
1342 "Existing chain's length should be larger than zero");
1343 }
1344 assert(!ChainLeader.count(&MI) && !ChainLeader.count(MaybeSpill) &&
1345 "Newly found paired spill-reload should not belong to any chain "
1346 "at this point");
1347 ChainLeader.insert({MaybeSpill, L});
1348 ChainLeader.insert({&MI, L});
1349 SpillChain[L].push_back(MaybeSpill);
1350 ReloadChain[L].push_back(&MI);
1351 LLVM_DEBUG(dbgs() << "MCP: Chain " << L << " now is:\n");
1352 LLVM_DEBUG(printSpillReloadChain(SpillChain, ReloadChain, L));
1353 } else if (MaybeSpill && !MaybeSpillIsChained) {
1354 // MaybeSpill is unable to pair with MI, that is, adding MI would make
1355 // the chain invalid.
1356 // The COPY that defines Src is no longer considered a candidate for a
1357 // valid chain, since we expect the Def of a spill copy not to be used
1358 // by any COPY instruction until the paired reload copy. For example:
1359 // L1: r1 = COPY r2
1360 // L2: r3 = COPY r1
1361 // If we later have
1362 // L1: r1 = COPY r2
1363 // L2: r3 = COPY r1
1364 // L3: r2 = COPY r1
1365 // L1 and L3 can't be a valid spill-reload pair.
1366 // Thus we keep property#1.
1367 LLVM_DEBUG(dbgs() << "MCP: Not paired spill-reload:\n");
1368 LLVM_DEBUG(MaybeSpill->dump());
1369 LLVM_DEBUG(MI.dump());
1370 Tracker.clobberRegister(Src.asMCReg(), *TRI, *TII, UseCopyInstr);
1371 LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Src, TRI)
1372 << "\n");
1373 }
1374 Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1375 }
1376
1377 for (auto I = SpillChain.begin(), E = SpillChain.end(); I != E; ++I) {
1378 auto &SC = I->second;
1379 assert(ReloadChain.count(I->first) &&
1380 "Reload chain of the same leader should exist");
1381 auto &RC = ReloadChain[I->first];
1382 TryFoldSpillageCopies(SC, RC);
1383 }
1384
1385 MaybeDeadCopies.clear();
1386 CopyDbgUsers.clear();
1387 Tracker.clear();
1388}
1389
1390bool MachineCopyPropagation::runOnMachineFunction(MachineFunction &MF) {
1391 if (skipFunction(MF.getFunction()))
1392 return false;
1393
1394 bool isSpillageCopyElimEnabled = false;
1395 switch (EnableSpillageCopyElimination) {
1396 case cl::BOU_UNSET:
1397 isSpillageCopyElimEnabled =
1398 MF.getSubtarget().enableSpillageCopyElimination();
1399 break;
1400 case cl::BOU_TRUE:
1401 isSpillageCopyElimEnabled = true;
1402 break;
1403 case cl::BOU_FALSE:
1404 isSpillageCopyElimEnabled = false;
1405 break;
1406 }
1407
1408 Changed = false;
1409
1410 TRI = MF.getSubtarget().getRegisterInfo();
1411 TII = MF.getSubtarget().getInstrInfo();
1412 MRI = &MF.getRegInfo();
1413
1414 for (MachineBasicBlock &MBB : MF) {
1415 if (isSpillageCopyElimEnabled)
1416 EliminateSpillageCopies(MBB);
1417 BackwardCopyPropagateBlock(MBB);
1418 ForwardCopyPropagateBlock(MBB);
1419 }
1420
1421 return Changed;
1422}
1423
1424MachineFunctionPass *
1425llvm::createMachineCopyPropagationPass(bool UseCopyInstr = false) {
1426 return new MachineCopyPropagation(UseCopyInstr);
1427}