LLVM  9.0.0svn
RegAllocFast.cpp
1 //===- RegAllocFast.cpp - A fast register allocator for debug code --------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 /// \file This register allocator allocates registers to a basic block at a
10 /// time, attempting to keep values in registers and reusing registers as
11 /// appropriate.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "llvm/ADT/ArrayRef.h"
16 #include "llvm/ADT/DenseMap.h"
17 #include "llvm/ADT/IndexedMap.h"
18 #include "llvm/ADT/SmallSet.h"
19 #include "llvm/ADT/SmallVector.h"
20 #include "llvm/ADT/SparseSet.h"
21 #include "llvm/ADT/Statistic.h"
36 #include "llvm/IR/DebugLoc.h"
37 #include "llvm/IR/Metadata.h"
38 #include "llvm/MC/MCInstrDesc.h"
39 #include "llvm/MC/MCRegisterInfo.h"
40 #include "llvm/Pass.h"
41 #include "llvm/Support/Casting.h"
42 #include "llvm/Support/Compiler.h"
43 #include "llvm/Support/Debug.h"
44 #include "llvm/Support/ErrorHandling.h"
45 #include "llvm/Support/raw_ostream.h"
46 #include <cassert>
47 #include <tuple>
48 #include <vector>
49 
50 using namespace llvm;
51 
52 #define DEBUG_TYPE "regalloc"
53 
54 STATISTIC(NumStores, "Number of stores added");
55 STATISTIC(NumLoads, "Number of loads added");
56 STATISTIC(NumCoalesced, "Number of copies coalesced");
57 
58 static RegisterRegAlloc
59  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);
60 
61 namespace {
62 
63  class RegAllocFast : public MachineFunctionPass {
64  public:
65  static char ID;
66 
67  RegAllocFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1) {}
68 
69  private:
70  MachineFrameInfo *MFI;
71  MachineRegisterInfo *MRI;
72  const TargetRegisterInfo *TRI;
73  const TargetInstrInfo *TII;
74  RegisterClassInfo RegClassInfo;
75 
76  /// Basic block currently being allocated.
77  MachineBasicBlock *MBB;
78 
79  /// Maps virtual regs to the frame index where these values are spilled.
80  IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;
81 
82  /// Everything we know about a live virtual register.
83  struct LiveReg {
84  MachineInstr *LastUse = nullptr; ///< Last instr to use reg.
85  unsigned VirtReg; ///< Virtual register number.
86  MCPhysReg PhysReg = 0; ///< Currently held here.
87  unsigned short LastOpNum = 0; ///< OpNum on LastUse.
88  bool Dirty = false; ///< Register needs spill.
89 
90  explicit LiveReg(unsigned VirtReg) : VirtReg(VirtReg) {}
91 
92  unsigned getSparseSetIndex() const {
93  return TargetRegisterInfo::virtReg2Index(VirtReg);
94  }
95  };
96 
97  using LiveRegMap = SparseSet<LiveReg>;
98  /// This map contains entries for each virtual register that is currently
99  /// available in a physical register.
100  LiveRegMap LiveVirtRegs;
101 
102  DenseMap<unsigned, SmallVector<MachineInstr *, 4>> LiveDbgValueMap;
103 
104  /// Has a bit set for every virtual register for which it was determined
105  /// that it is alive across blocks.
106  BitVector MayLiveAcrossBlocks;
107 
108  /// State of a physical register.
109  enum RegState {
110  /// A disabled register is not available for allocation, but an alias may
111  /// be in use. A register can only be moved out of the disabled state if
112  /// all aliases are disabled.
113  regDisabled,
114 
115  /// A free register is not currently in use and can be allocated
116  /// immediately without checking aliases.
117  regFree,
118 
119  /// A reserved register has been assigned explicitly (e.g., setting up a
120  /// call parameter), and it remains reserved until it is used.
121  regReserved
122 
123  /// A register state may also be a virtual register number, indicating
124  /// that the physical register is currently allocated to a virtual
125  /// register. In that case, LiveVirtRegs contains the inverse mapping.
126  };
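A minimal sketch of how this encoding is read back, mirroring the switch statements in calcSpillCost() and dumpState() below (illustrative only, not part of the pass):

// switch (unsigned VirtReg = PhysRegState[PhysReg]) {
// case regDisabled: break;  // an alias may be live; PhysReg itself is unusable
// case regFree:     break;  // immediately allocatable
// case regReserved: break;  // explicitly assigned, e.g. for a call argument
// default:
//   // Any other value is a virtual register number: PhysReg currently holds
//   // VirtReg, and LiveVirtRegs contains the inverse mapping.
//   break;
// }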
127 
128  /// Maps each physical register to a RegState enum or a virtual register.
129  std::vector<unsigned> PhysRegState;
130 
131  SmallVector<unsigned, 16> VirtDead;
132  SmallVector<MachineInstr *, 32> Coalesced;
133 
134  using RegUnitSet = SparseSet<uint16_t, identity<uint16_t>>;
135  /// Set of register units that are used in the current instruction, and so
136  /// cannot be allocated.
137  RegUnitSet UsedInInstr;
138 
139  void setPhysRegState(MCPhysReg PhysReg, unsigned NewState);
140 
141  /// Mark a physreg as used in this instruction.
142  void markRegUsedInInstr(MCPhysReg PhysReg) {
143  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
144  UsedInInstr.insert(*Units);
145  }
146 
147  /// Check if a physreg or any of its aliases are used in this instruction.
148  bool isRegUsedInInstr(MCPhysReg PhysReg) const {
149  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
150  if (UsedInInstr.count(*Units))
151  return true;
152  return false;
153  }
154 
155  enum : unsigned {
156  spillClean = 50,
157  spillDirty = 100,
158  spillPrefBonus = 20,
159  spillImpossible = ~0u
160  };
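These constants only matter relative to one another. A minimal sketch of how they are compared, mirroring the hint handling in allocVirtReg() below (illustrative only):

// A hint is only taken when honoring it would not evict a dirty value:
//   unsigned Cost = calcSpillCost(Hint0);
//   if (Cost < spillDirty) { /* take Hint0, spilling a clean value if needed */ }
// During the scan over the allocation order, hinted registers are preferred by
// subtracting spillPrefBonus from their cost before comparing with the best
// candidate so far; spillImpossible marks registers that cannot be evicted.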
161 
162  public:
163  StringRef getPassName() const override { return "Fast Register Allocator"; }
164 
165  void getAnalysisUsage(AnalysisUsage &AU) const override {
166  AU.setPreservesCFG();
167  MachineFunctionPass::getAnalysisUsage(AU);
168  }
169 
170  MachineFunctionProperties getRequiredProperties() const override {
171  return MachineFunctionProperties().set(
172  MachineFunctionProperties::Property::NoPHIs);
173  }
174 
175  MachineFunctionProperties getSetProperties() const override {
176  return MachineFunctionProperties().set(
177  MachineFunctionProperties::Property::NoVRegs);
178  }
179 
180  private:
181  bool runOnMachineFunction(MachineFunction &MF) override;
182 
183  void allocateBasicBlock(MachineBasicBlock &MBB);
184  void allocateInstruction(MachineInstr &MI);
185  void handleDebugValue(MachineInstr &MI);
186  void handleThroughOperands(MachineInstr &MI,
187  SmallVectorImpl<unsigned> &VirtDead);
188  bool isLastUseOfLocalReg(const MachineOperand &MO) const;
189 
190  void addKillFlag(const LiveReg &LRI);
191  void killVirtReg(LiveReg &LR);
192  void killVirtReg(unsigned VirtReg);
193  void spillVirtReg(MachineBasicBlock::iterator MI, LiveReg &LR);
194  void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);
195 
196  void usePhysReg(MachineOperand &MO);
197  void definePhysReg(MachineBasicBlock::iterator MI, MCPhysReg PhysReg,
198  RegState NewState);
199  unsigned calcSpillCost(MCPhysReg PhysReg) const;
200  void assignVirtToPhysReg(LiveReg &, MCPhysReg PhysReg);
201 
202  LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
203  return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
204  }
205 
206  LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
207  return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
208  }
209 
210  void allocVirtReg(MachineInstr &MI, LiveReg &LR, unsigned Hint);
211  void allocVirtRegUndef(MachineOperand &MO);
212  MCPhysReg defineVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg,
213  unsigned Hint);
214  LiveReg &reloadVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg,
215  unsigned Hint);
216  void spillAll(MachineBasicBlock::iterator MI, bool OnlyLiveOut);
217  bool setPhysReg(MachineInstr &MI, MachineOperand &MO, MCPhysReg PhysReg);
218 
219  unsigned traceCopies(unsigned VirtReg) const;
220  unsigned traceCopyChain(unsigned Reg) const;
221 
222  int getStackSpaceFor(unsigned VirtReg);
223  void spill(MachineBasicBlock::iterator Before, unsigned VirtReg,
224  MCPhysReg AssignedReg, bool Kill);
225  void reload(MachineBasicBlock::iterator Before, unsigned VirtReg,
226  MCPhysReg PhysReg);
227 
228  bool mayLiveOut(unsigned VirtReg);
229  bool mayLiveIn(unsigned VirtReg);
230 
231  void dumpState();
232  };
233 
234 } // end anonymous namespace
235 
236 char RegAllocFast::ID = 0;
237 
238 INITIALIZE_PASS(RegAllocFast, "regallocfast", "Fast Register Allocator", false,
239  false)
240 
241 void RegAllocFast::setPhysRegState(MCPhysReg PhysReg, unsigned NewState) {
242  PhysRegState[PhysReg] = NewState;
243 }
244 
245 /// This allocates space for the specified virtual register to be held on the
246 /// stack.
247 int RegAllocFast::getStackSpaceFor(unsigned VirtReg) {
248  // Find the location Reg would belong...
249  int SS = StackSlotForVirtReg[VirtReg];
250  // Already has space allocated?
251  if (SS != -1)
252  return SS;
253 
254  // Allocate a new stack object for this spill location...
255  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
256  unsigned Size = TRI->getSpillSize(RC);
257  unsigned Align = TRI->getSpillAlignment(RC);
258  int FrameIdx = MFI->CreateSpillStackObject(Size, Align);
259 
260  // Assign the slot.
261  StackSlotForVirtReg[VirtReg] = FrameIdx;
262  return FrameIdx;
263 }
264 
265 /// Returns false if \p VirtReg is known to not live out of the current block.
266 bool RegAllocFast::mayLiveOut(unsigned VirtReg) {
267  if (MayLiveAcrossBlocks.test(TargetRegisterInfo::virtReg2Index(VirtReg))) {
268  // Cannot be live-out if there are no successors.
269  return !MBB->succ_empty();
270  }
271 
272  // If this block loops back to itself, it would be necessary to check whether
273  // the use comes after the def.
274  if (MBB->isSuccessor(MBB)) {
275  MayLiveAcrossBlocks.set(TargetRegisterInfo::virtReg2Index(VirtReg));
276  return true;
277  }
278 
279  // See if the first \p Limit uses of the register are all in the current
280  // block.
281  static const unsigned Limit = 8;
282  unsigned C = 0;
283  for (const MachineInstr &UseInst : MRI->reg_nodbg_instructions(VirtReg)) {
284  if (UseInst.getParent() != MBB || ++C >= Limit) {
285  MayLiveAcrossBlocks.set(TargetRegisterInfo::virtReg2Index(VirtReg));
286  // Cannot be live-out if there are no successors.
287  return !MBB->succ_empty();
288  }
289  }
290 
291  return false;
292 }
293 
294 /// Returns false if \p VirtReg is known to not be live into the current block.
295 bool RegAllocFast::mayLiveIn(unsigned VirtReg) {
296  if (MayLiveAcrossBlocks.test(TargetRegisterInfo::virtReg2Index(VirtReg)))
297  return !MBB->pred_empty();
298 
299  // See if the first \p Limit defs of the register are all in the current block.
300  static const unsigned Limit = 8;
301  unsigned C = 0;
302  for (const MachineInstr &DefInst : MRI->def_instructions(VirtReg)) {
303  if (DefInst.getParent() != MBB || ++C >= Limit) {
304  MayLiveAcrossBlocks.set(TargetRegisterInfo::virtReg2Index(VirtReg));
305  return !MBB->pred_empty();
306  }
307  }
308 
309  return false;
310 }
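Both queries are deliberately approximate: only the first Limit (8) uses or defs are inspected, and any access outside the current block (or past the limit) conservatively marks the register in MayLiveAcrossBlocks. A hedged usage sketch, mirroring the end-of-block handling in spillAll() below:

// Callers only rely on the conservative direction (illustrative only):
//   if (OnlyLiveOut && !mayLiveOut(LR.VirtReg))
//     continue;  // provably block-local: no spill needed at the block end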
311 
312 /// Insert spill instruction for \p AssignedReg before \p Before. Update
313 /// DBG_VALUEs with \p VirtReg operands to refer to the stack slot.
314 void RegAllocFast::spill(MachineBasicBlock::iterator Before, unsigned VirtReg,
315  MCPhysReg AssignedReg, bool Kill) {
316  LLVM_DEBUG(dbgs() << "Spilling " << printReg(VirtReg, TRI)
317  << " in " << printReg(AssignedReg, TRI));
318  int FI = getStackSpaceFor(VirtReg);
319  LLVM_DEBUG(dbgs() << " to stack slot #" << FI << '\n');
320 
321  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
322  TII->storeRegToStackSlot(*MBB, Before, AssignedReg, Kill, FI, &RC, TRI);
323  ++NumStores;
324 
325  // If this register is used by DBG_VALUE then insert new DBG_VALUE
326  // instructions to identify the spilled location as the place to find the
327  // corresponding variable's value.
328  SmallVectorImpl<MachineInstr *> &LRIDbgValues = LiveDbgValueMap[VirtReg];
329  for (MachineInstr *DBG : LRIDbgValues) {
330  MachineInstr *NewDV = buildDbgValueForSpill(*MBB, Before, *DBG, FI);
331  assert(NewDV->getParent() == MBB && "dangling parent pointer");
332  (void)NewDV;
333  LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:\n" << *NewDV);
334  }
335  // Now that this register is spilled, there should not be any DBG_VALUE
336  // pointing to this register because they all point to the spilled value
337  // now.
338  LRIDbgValues.clear();
339 }
340 
341 /// Insert reload instruction for \p PhysReg before \p Before.
342 void RegAllocFast::reload(MachineBasicBlock::iterator Before, unsigned VirtReg,
343  MCPhysReg PhysReg) {
344  LLVM_DEBUG(dbgs() << "Reloading " << printReg(VirtReg, TRI) << " into "
345  << printReg(PhysReg, TRI) << '\n');
346  int FI = getStackSpaceFor(VirtReg);
347  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
348  TII->loadRegFromStackSlot(*MBB, Before, PhysReg, FI, &RC, TRI);
349  ++NumLoads;
350 }
351 
352 /// Return true if MO is the only remaining reference to its virtual register,
353 /// and it is guaranteed to be a block-local register.
354 bool RegAllocFast::isLastUseOfLocalReg(const MachineOperand &MO) const {
355  // If the register has ever been spilled or reloaded, we conservatively assume
356  // it is a global register used in multiple blocks.
357  if (StackSlotForVirtReg[MO.getReg()] != -1)
358  return false;
359 
360  // Check that the use/def chain has exactly one operand - MO.
361  MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
362  if (&*I != &MO)
363  return false;
364  return ++I == MRI->reg_nodbg_end();
365 }
366 
367 /// Set kill flags on last use of a virtual register.
368 void RegAllocFast::addKillFlag(const LiveReg &LR) {
369  if (!LR.LastUse) return;
370  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
371  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
372  if (MO.getReg() == LR.PhysReg)
373  MO.setIsKill();
374  // else, don't do anything: we are probably redefining a
375  // subreg of this register and, given we don't track which
376  // lanes are actually dead, we cannot insert a kill flag here.
377  // Otherwise we may end up in a situation like this:
378  // ... = (MO) physreg:sub1, implicit killed physreg
379  // ... <== Here we would allow later pass to reuse physreg:sub1
380  // which is potentially wrong.
381  // LR:sub0 = ...
382  // ... = LR.sub1 <== This is going to use physreg:sub1
383  }
384 }
385 
386 /// Mark virtreg as no longer available.
387 void RegAllocFast::killVirtReg(LiveReg &LR) {
388  addKillFlag(LR);
389  assert(PhysRegState[LR.PhysReg] == LR.VirtReg &&
390  "Broken RegState mapping");
391  setPhysRegState(LR.PhysReg, regFree);
392  LR.PhysReg = 0;
393 }
394 
395 /// Mark virtreg as no longer available.
396 void RegAllocFast::killVirtReg(unsigned VirtReg) {
397  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
398  "killVirtReg needs a virtual register");
399  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
400  if (LRI != LiveVirtRegs.end() && LRI->PhysReg)
401  killVirtReg(*LRI);
402 }
403 
404 /// This method spills the value specified by VirtReg into the corresponding
405 /// stack slot if needed.
406 void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI,
407  unsigned VirtReg) {
408  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
409  "Spilling a physical register is illegal!");
410  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
411  assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
412  "Spilling unmapped virtual register");
413  spillVirtReg(MI, *LRI);
414 }
415 
416 /// Do the actual work of spilling.
417 void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI, LiveReg &LR) {
418  assert(PhysRegState[LR.PhysReg] == LR.VirtReg && "Broken RegState mapping");
419 
420  if (LR.Dirty) {
421  // If this physreg is used by the instruction, we want to kill it on the
422  // instruction, not on the spill.
423  bool SpillKill = MachineBasicBlock::iterator(LR.LastUse) != MI;
424  LR.Dirty = false;
425 
426  spill(MI, LR.VirtReg, LR.PhysReg, SpillKill);
427 
428  if (SpillKill)
429  LR.LastUse = nullptr; // Don't kill register again
430  }
431  killVirtReg(LR);
432 }
433 
434 /// Spill all dirty virtregs without killing them.
435 void RegAllocFast::spillAll(MachineBasicBlock::iterator MI, bool OnlyLiveOut) {
436  if (LiveVirtRegs.empty())
437  return;
438  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
439  // of spilling here is deterministic, if arbitrary.
440  for (LiveReg &LR : LiveVirtRegs) {
441  if (!LR.PhysReg)
442  continue;
443  if (OnlyLiveOut && !mayLiveOut(LR.VirtReg))
444  continue;
445  spillVirtReg(MI, LR);
446  }
447  LiveVirtRegs.clear();
448 }
449 
450 /// Handle the direct use of a physical register. Check that the register is
451 /// not used by a virtreg. Kill the physreg, marking it free. This may add
452 /// implicit kills to MO->getParent() and invalidate MO.
453 void RegAllocFast::usePhysReg(MachineOperand &MO) {
454  // Ignore undef uses.
455  if (MO.isUndef())
456  return;
457 
458  unsigned PhysReg = MO.getReg();
459  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
460  "Bad usePhysReg operand");
461 
462  markRegUsedInInstr(PhysReg);
463  switch (PhysRegState[PhysReg]) {
464  case regDisabled:
465  break;
466  case regReserved:
467  PhysRegState[PhysReg] = regFree;
468  LLVM_FALLTHROUGH;
469  case regFree:
470  MO.setIsKill();
471  return;
472  default:
473  // The physreg was allocated to a virtual register. That means the value we
474  // wanted has been clobbered.
475  llvm_unreachable("Instruction uses an allocated register");
476  }
477 
478  // Maybe a superregister is reserved?
479  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
480  MCPhysReg Alias = *AI;
481  switch (PhysRegState[Alias]) {
482  case regDisabled:
483  break;
484  case regReserved:
485  // Either PhysReg is a subregister of Alias and we mark the
486  // whole register as free, or PhysReg is the superregister of
487  // Alias and we mark all the aliases as disabled before freeing
488  // PhysReg.
489  // In the latter case, since PhysReg was disabled, this means that
490  // its value is defined only by physical sub-registers. This check
491  // is performed by the assert of the default case in this loop.
492  // Note: The value of the superregister may only be partially
493  // defined, that is why regDisabled is a valid state for aliases.
494  assert((TRI->isSuperRegister(PhysReg, Alias) ||
495  TRI->isSuperRegister(Alias, PhysReg)) &&
496  "Instruction is not using a subregister of a reserved register");
497  LLVM_FALLTHROUGH;
498  case regFree:
499  if (TRI->isSuperRegister(PhysReg, Alias)) {
500  // Leave the superregister in the working set.
501  setPhysRegState(Alias, regFree);
502  MO.getParent()->addRegisterKilled(Alias, TRI, true);
503  return;
504  }
505  // Some other alias was in the working set - clear it.
506  setPhysRegState(Alias, regDisabled);
507  break;
508  default:
509  llvm_unreachable("Instruction uses an alias of an allocated register");
510  }
511  }
512 
513  // All aliases are disabled, bring register into working set.
514  setPhysRegState(PhysReg, regFree);
515  MO.setIsKill();
516 }
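A hedged reading of the alias loop above, as a hypothetical x86 scenario (register names chosen only for illustration):

//   PhysRegState[$al]  == regDisabled   // only the wider register is tracked
//   PhysRegState[$eax] == regReserved   // e.g. set up as a block live-in
//   usePhysReg(<use of $al>):
//     - falls through the regReserved case to regFree for the alias $eax,
//     - leaves the super-register $eax in the working set as regFree,
//     - adds "killed $eax" to the using instruction via addRegisterKilled().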
517 
518 /// Mark PhysReg as reserved or free after spilling any virtregs. This is very
519 /// similar to defineVirtReg except the physreg is reserved instead of
520 /// allocated.
521 void RegAllocFast::definePhysReg(MachineBasicBlock::iterator MI,
522  MCPhysReg PhysReg, RegState NewState) {
523  markRegUsedInInstr(PhysReg);
524  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
525  case regDisabled:
526  break;
527  default:
528  spillVirtReg(MI, VirtReg);
529  LLVM_FALLTHROUGH;
530  case regFree:
531  case regReserved:
532  setPhysRegState(PhysReg, NewState);
533  return;
534  }
535 
536  // This is a disabled register, disable all aliases.
537  setPhysRegState(PhysReg, NewState);
538  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
539  MCPhysReg Alias = *AI;
540  switch (unsigned VirtReg = PhysRegState[Alias]) {
541  case regDisabled:
542  break;
543  default:
544  spillVirtReg(MI, VirtReg);
545  LLVM_FALLTHROUGH;
546  case regFree:
547  case regReserved:
548  setPhysRegState(Alias, regDisabled);
549  if (TRI->isSuperRegister(PhysReg, Alias))
550  return;
551  break;
552  }
553  }
554 }
555 
556 /// Return the cost of spilling and clearing out PhysReg and its aliases so
557 /// that it is free for allocation. Returns 0 when PhysReg is free or disabled
558 /// with all aliases disabled - it can be allocated directly.
559 /// \returns spillImpossible when PhysReg or an alias can't be spilled.
560 unsigned RegAllocFast::calcSpillCost(MCPhysReg PhysReg) const {
561  if (isRegUsedInInstr(PhysReg)) {
562  LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI)
563  << " is already used in instr.\n");
564  return spillImpossible;
565  }
566  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
567  case regDisabled:
568  break;
569  case regFree:
570  return 0;
571  case regReserved:
572  LLVM_DEBUG(dbgs() << printReg(VirtReg, TRI) << " corresponding "
573  << printReg(PhysReg, TRI) << " is reserved already.\n");
574  return spillImpossible;
575  default: {
576  LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
577  assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
578  "Missing VirtReg entry");
579  return LRI->Dirty ? spillDirty : spillClean;
580  }
581  }
582 
583  // This is a disabled register, add up cost of aliases.
584  LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI) << " is disabled.\n");
585  unsigned Cost = 0;
586  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
587  MCPhysReg Alias = *AI;
588  switch (unsigned VirtReg = PhysRegState[Alias]) {
589  case regDisabled:
590  break;
591  case regFree:
592  ++Cost;
593  break;
594  case regReserved:
595  return spillImpossible;
596  default: {
597  LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
598  assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
599  "Missing VirtReg entry");
600  Cost += LRI->Dirty ? spillDirty : spillClean;
601  break;
602  }
603  }
604  }
605  return Cost;
606 }
607 
608 /// This method updates local state so that we know that PhysReg is the
609 /// proper container for VirtReg now. The physical register must not be used
610 /// for anything else when this is called.
611 void RegAllocFast::assignVirtToPhysReg(LiveReg &LR, MCPhysReg PhysReg) {
612  unsigned VirtReg = LR.VirtReg;
613  LLVM_DEBUG(dbgs() << "Assigning " << printReg(VirtReg, TRI) << " to "
614  << printReg(PhysReg, TRI) << '\n');
615  assert(LR.PhysReg == 0 && "Already assigned a physreg");
616  assert(PhysReg != 0 && "Trying to assign no register");
617  LR.PhysReg = PhysReg;
618  setPhysRegState(PhysReg, VirtReg);
619 }
620 
621 static bool isCoalescable(const MachineInstr &MI) {
622  return MI.isFullCopy();
623 }
624 
625 unsigned RegAllocFast::traceCopyChain(unsigned Reg) const {
626  static const unsigned ChainLengthLimit = 3;
627  unsigned C = 0;
628  do {
629  if (TargetRegisterInfo::isPhysicalRegister(Reg))
630  return Reg;
631  assert(TargetRegisterInfo::isVirtualRegister(Reg));
632 
633  MachineInstr *VRegDef = MRI->getUniqueVRegDef(Reg);
634  if (!VRegDef || !isCoalescable(*VRegDef))
635  return 0;
636  Reg = VRegDef->getOperand(1).getReg();
637  } while (++C <= ChainLengthLimit);
638  return 0;
639 }
640 
641 /// Check if any of \p VirtReg's definitions is a copy. If it is, follow the
642 /// chain of copies to check whether we reach a physical register we can
643 /// coalesce with.
644 unsigned RegAllocFast::traceCopies(unsigned VirtReg) const {
645  static const unsigned DefLimit = 3;
646  unsigned C = 0;
647  for (const MachineInstr &MI : MRI->def_instructions(VirtReg)) {
648  if (isCoalescable(MI)) {
649  unsigned Reg = MI.getOperand(1).getReg();
650  Reg = traceCopyChain(Reg);
651  if (Reg != 0)
652  return Reg;
653  }
654 
655  if (++C >= DefLimit)
656  break;
657  }
658  return 0;
659 }
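Together these two helpers turn a short chain of full copies into a physical-register hint. A hypothetical MIR fragment (register names invented for illustration):

//   %1 = COPY $edi          ; traceCopyChain(%1) reaches $edi
//   %2 = COPY %1            ; traceCopies(%2) inspects this def, follows %1,
//   ... = ADD32rr %2, ...   ;   and returns $edi as the allocation hint
// Chains longer than ChainLengthLimit (3) copies, or registers with more than
// DefLimit (3) defs, give up and return 0 (no hint).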
660 
661 /// Allocates a physical register for VirtReg.
662 void RegAllocFast::allocVirtReg(MachineInstr &MI, LiveReg &LR, unsigned Hint0) {
663  const unsigned VirtReg = LR.VirtReg;
664 
665  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
666  "Can only allocate virtual registers");
667 
668  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
669  LLVM_DEBUG(dbgs() << "Search register for " << printReg(VirtReg)
670  << " in class " << TRI->getRegClassName(&RC)
671  << " with hint " << printReg(Hint0, TRI) << '\n');
672 
673  // Take hint when possible.
674  if (TargetRegisterInfo::isPhysicalRegister(Hint0) &&
675  MRI->isAllocatable(Hint0) && RC.contains(Hint0)) {
676  // Ignore the hint if we would have to spill a dirty register.
677  unsigned Cost = calcSpillCost(Hint0);
678  if (Cost < spillDirty) {
679  LLVM_DEBUG(dbgs() << "\tPreferred Register 1: " << printReg(Hint0, TRI)
680  << '\n');
681  if (Cost)
682  definePhysReg(MI, Hint0, regFree);
683  assignVirtToPhysReg(LR, Hint0);
684  return;
685  } else {
686  LLVM_DEBUG(dbgs() << "\tPreferred Register 1: " << printReg(Hint0, TRI)
687  << "occupied\n");
688  }
689  } else {
690  Hint0 = 0;
691  }
692 
693  // Try other hint.
694  unsigned Hint1 = traceCopies(VirtReg);
695  if (TargetRegisterInfo::isPhysicalRegister(Hint1) &&
696  MRI->isAllocatable(Hint1) && RC.contains(Hint1) &&
697  !isRegUsedInInstr(Hint1)) {
698  // Ignore the hint if we would have to spill a dirty register.
699  unsigned Cost = calcSpillCost(Hint1);
700  if (Cost < spillDirty) {
701  LLVM_DEBUG(dbgs() << "\tPreferred Register 0: " << printReg(Hint1, TRI)
702  << '\n');
703  if (Cost)
704  definePhysReg(MI, Hint1, regFree);
705  assignVirtToPhysReg(LR, Hint1);
706  return;
707  } else {
708  LLVM_DEBUG(dbgs() << "\tPreferred Register 0: " << printReg(Hint1, TRI)
709  << "occupied\n");
710  }
711  } else {
712  Hint1 = 0;
713  }
714 
715  MCPhysReg BestReg = 0;
716  unsigned BestCost = spillImpossible;
717  ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
718  for (MCPhysReg PhysReg : AllocationOrder) {
719  LLVM_DEBUG(dbgs() << "\tRegister: " << printReg(PhysReg, TRI) << ' ');
720  unsigned Cost = calcSpillCost(PhysReg);
721  LLVM_DEBUG(dbgs() << "Cost: " << Cost << " BestCost: " << BestCost << '\n');
722  // Immediately take a register with cost 0.
723  if (Cost == 0) {
724  assignVirtToPhysReg(LR, PhysReg);
725  return;
726  }
727 
728  if (PhysReg == Hint1 || PhysReg == Hint0)
729  Cost -= spillPrefBonus;
730 
731  if (Cost < BestCost) {
732  BestReg = PhysReg;
733  BestCost = Cost;
734  }
735  }
736 
737  if (!BestReg) {
738  // Nothing we can do: Report an error and keep going with an invalid
739  // allocation.
740  if (MI.isInlineAsm())
741  MI.emitError("inline assembly requires more registers than available");
742  else
743  MI.emitError("ran out of registers during register allocation");
744  definePhysReg(MI, *AllocationOrder.begin(), regFree);
745  assignVirtToPhysReg(LR, *AllocationOrder.begin());
746  return;
747  }
748 
749  definePhysReg(MI, BestReg, regFree);
750  assignVirtToPhysReg(LR, BestReg);
751 }
752 
753 void RegAllocFast::allocVirtRegUndef(MachineOperand &MO) {
754  assert(MO.isUndef() && "expected undef use");
755  unsigned VirtReg = MO.getReg();
756  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && "Expected virtreg");
757 
758  LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
759  MCPhysReg PhysReg;
760  if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
761  PhysReg = LRI->PhysReg;
762  } else {
763  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
764  ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
765  assert(!AllocationOrder.empty() && "Allocation order must not be empty");
766  PhysReg = AllocationOrder[0];
767  }
768 
769  unsigned SubRegIdx = MO.getSubReg();
770  if (SubRegIdx != 0) {
771  PhysReg = TRI->getSubReg(PhysReg, SubRegIdx);
772  MO.setSubReg(0);
773  }
774  MO.setReg(PhysReg);
775  MO.setIsRenamable(true);
776 }
777 
778 /// Allocates a register for VirtReg and marks it as dirty.
779 MCPhysReg RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum,
780  unsigned VirtReg, unsigned Hint) {
781  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
782  "Not a virtual register");
783  LiveRegMap::iterator LRI;
784  bool New;
785  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
786  if (!LRI->PhysReg) {
787  // If there is no hint, peek at the only use of this register.
788  if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
789  MRI->hasOneNonDBGUse(VirtReg)) {
790  const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg);
791  // It's a copy, use the destination register as a hint.
792  if (UseMI.isCopyLike())
793  Hint = UseMI.getOperand(0).getReg();
794  }
795  allocVirtReg(MI, *LRI, Hint);
796  } else if (LRI->LastUse) {
797  // Redefining a live register - kill at the last use, unless it is this
798  // instruction defining VirtReg multiple times.
799  if (LRI->LastUse != &MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
800  addKillFlag(*LRI);
801  }
802  assert(LRI->PhysReg && "Register not assigned");
803  LRI->LastUse = &MI;
804  LRI->LastOpNum = OpNum;
805  LRI->Dirty = true;
806  markRegUsedInInstr(LRI->PhysReg);
807  return LRI->PhysReg;
808 }
809 
810 /// Make sure VirtReg is available in a physreg and return it.
811 RegAllocFast::LiveReg &RegAllocFast::reloadVirtReg(MachineInstr &MI,
812  unsigned OpNum,
813  unsigned VirtReg,
814  unsigned Hint) {
815  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
816  "Not a virtual register");
817  LiveRegMap::iterator LRI;
818  bool New;
819  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
820  MachineOperand &MO = MI.getOperand(OpNum);
821  if (!LRI->PhysReg) {
822  allocVirtReg(MI, *LRI, Hint);
823  reload(MI, VirtReg, LRI->PhysReg);
824  } else if (LRI->Dirty) {
825  if (isLastUseOfLocalReg(MO)) {
826  LLVM_DEBUG(dbgs() << "Killing last use: " << MO << '\n');
827  if (MO.isUse())
828  MO.setIsKill();
829  else
830  MO.setIsDead();
831  } else if (MO.isKill()) {
832  LLVM_DEBUG(dbgs() << "Clearing dubious kill: " << MO << '\n');
833  MO.setIsKill(false);
834  } else if (MO.isDead()) {
835  LLVM_DEBUG(dbgs() << "Clearing dubious dead: " << MO << '\n');
836  MO.setIsDead(false);
837  }
838  } else if (MO.isKill()) {
839  // We must remove kill flags from uses of reloaded registers because the
840  // register would be killed immediately, and there might be a second use:
841  // %foo = OR killed %x, %x
842  // This would cause a second reload of %x into a different register.
843  LLVM_DEBUG(dbgs() << "Clearing clean kill: " << MO << '\n');
844  MO.setIsKill(false);
845  } else if (MO.isDead()) {
846  LLVM_DEBUG(dbgs() << "Clearing clean dead: " << MO << '\n');
847  MO.setIsDead(false);
848  }
849  assert(LRI->PhysReg && "Register not assigned");
850  LRI->LastUse = &MI;
851  LRI->LastOpNum = OpNum;
852  markRegUsedInInstr(LRI->PhysReg);
853  return *LRI;
854 }
855 
856 /// Changes operand OpNum in MI to refer to PhysReg, considering subregs. This
857 /// may invalidate any operand pointers. Return true if the operand kills its
858 /// register.
859 bool RegAllocFast::setPhysReg(MachineInstr &MI, MachineOperand &MO,
860  MCPhysReg PhysReg) {
861  bool Dead = MO.isDead();
862  if (!MO.getSubReg()) {
863  MO.setReg(PhysReg);
864  MO.setIsRenamable(true);
865  return MO.isKill() || Dead;
866  }
867 
868  // Handle subregister index.
869  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
870  MO.setIsRenamable(true);
871  MO.setSubReg(0);
872 
873  // A kill flag implies killing the full register. Add corresponding super
874  // register kill.
875  if (MO.isKill()) {
876  MI.addRegisterKilled(PhysReg, TRI, true);
877  return true;
878  }
879 
880  // A <def,read-undef> of a sub-register requires an implicit def of the full
881  // register.
882  if (MO.isDef() && MO.isUndef())
883  MI.addRegisterDefined(PhysReg, TRI);
884 
885  return Dead;
886 }
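For an operand that carried a sub-register index, the rewrite above folds the index into the physical register and clears it; a kill flag on such an operand is widened to an implicit kill of the full register. A hypothetical x86-flavored example, assuming the virtual register was assigned $rbx:

//   before:  ... = ADD32rr ..., killed %2.sub_32bit
//   after:   ... = ADD32rr ..., $ebx, implicit killed $rbx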
887 
888 // Handles special instruction operands like early clobbers and tied ops when
889 // there are additional physreg defines.
890 void RegAllocFast::handleThroughOperands(MachineInstr &MI,
891  SmallVectorImpl<unsigned> &VirtDead) {
892  LLVM_DEBUG(dbgs() << "Scanning for through registers:");
893  SmallSet<unsigned, 8> ThroughRegs;
894  for (const MachineOperand &MO : MI.operands()) {
895  if (!MO.isReg()) continue;
896  unsigned Reg = MO.getReg();
897  if (!TargetRegisterInfo::isVirtualRegister(Reg))
898  continue;
899  if (MO.isEarlyClobber() || (MO.isUse() && MO.isTied()) ||
900  (MO.getSubReg() && MI.readsVirtualRegister(Reg))) {
901  if (ThroughRegs.insert(Reg).second)
902  LLVM_DEBUG(dbgs() << ' ' << printReg(Reg));
903  }
904  }
905 
906  // If any physreg defines collide with preallocated through registers,
907  // we must spill and reallocate.
908  LLVM_DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
909  for (const MachineOperand &MO : MI.operands()) {
910  if (!MO.isReg() || !MO.isDef()) continue;
911  unsigned Reg = MO.getReg();
912  if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
913  markRegUsedInInstr(Reg);
914  for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
915  if (ThroughRegs.count(PhysRegState[*AI]))
916  definePhysReg(MI, *AI, regFree);
917  }
918  }
919 
920  SmallVector<unsigned, 8> PartialDefs;
921  LLVM_DEBUG(dbgs() << "Allocating tied uses.\n");
922  for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
923  MachineOperand &MO = MI.getOperand(I);
924  if (!MO.isReg()) continue;
925  unsigned Reg = MO.getReg();
926  if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
927  if (MO.isUse()) {
928  if (!MO.isTied()) continue;
929  LLVM_DEBUG(dbgs() << "Operand " << I << "(" << MO
930  << ") is tied to operand " << MI.findTiedOperandIdx(I)
931  << ".\n");
932  LiveReg &LR = reloadVirtReg(MI, I, Reg, 0);
933  MCPhysReg PhysReg = LR.PhysReg;
934  setPhysReg(MI, MO, PhysReg);
935  // Note: we don't update the def operand yet. That would cause the normal
936  // def-scan to attempt spilling.
937  } else if (MO.getSubReg() && MI.readsVirtualRegister(Reg)) {
938  LLVM_DEBUG(dbgs() << "Partial redefine: " << MO << '\n');
939  // Reload the register, but don't assign to the operand just yet.
940  // That would confuse the later phys-def processing pass.
941  LiveReg &LR = reloadVirtReg(MI, I, Reg, 0);
942  PartialDefs.push_back(LR.PhysReg);
943  }
944  }
945 
946  LLVM_DEBUG(dbgs() << "Allocating early clobbers.\n");
947  for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
948  const MachineOperand &MO = MI.getOperand(I);
949  if (!MO.isReg()) continue;
950  unsigned Reg = MO.getReg();
951  if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
952  if (!MO.isEarlyClobber())
953  continue;
954  // Note: defineVirtReg may invalidate MO.
955  MCPhysReg PhysReg = defineVirtReg(MI, I, Reg, 0);
956  if (setPhysReg(MI, MI.getOperand(I), PhysReg))
957  VirtDead.push_back(Reg);
958  }
959 
960  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
961  UsedInInstr.clear();
962  for (const MachineOperand &MO : MI.operands()) {
963  if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
964  unsigned Reg = MO.getReg();
965  if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
966  LLVM_DEBUG(dbgs() << "\tSetting " << printReg(Reg, TRI)
967  << " as used in instr\n");
968  markRegUsedInInstr(Reg);
969  }
970 
971  // Also mark PartialDefs as used to avoid reallocation.
972  for (unsigned PartialDef : PartialDefs)
973  markRegUsedInInstr(PartialDef);
974 }
975 
976 #ifndef NDEBUG
977 void RegAllocFast::dumpState() {
978  for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
979  if (PhysRegState[Reg] == regDisabled) continue;
980  dbgs() << " " << printReg(Reg, TRI);
981  switch(PhysRegState[Reg]) {
982  case regFree:
983  break;
984  case regReserved:
985  dbgs() << "*";
986  break;
987  default: {
988  dbgs() << '=' << printReg(PhysRegState[Reg]);
989  LiveRegMap::iterator LRI = findLiveVirtReg(PhysRegState[Reg]);
990  assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
991  "Missing VirtReg entry");
992  if (LRI->Dirty)
993  dbgs() << "*";
994  assert(LRI->PhysReg == Reg && "Bad inverse map");
995  break;
996  }
997  }
998  }
999  dbgs() << '\n';
1000  // Check that LiveVirtRegs is the inverse.
1001  for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
1002  e = LiveVirtRegs.end(); i != e; ++i) {
1003  if (!i->PhysReg)
1004  continue;
1005  assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
1006  "Bad map key");
1007  assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
1008  "Bad map value");
1009  assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
1010  }
1011 }
1012 #endif
1013 
1014 void RegAllocFast::allocateInstruction(MachineInstr &MI) {
1015  const MCInstrDesc &MCID = MI.getDesc();
1016 
1017  // If this is a copy, we may be able to coalesce.
1018  unsigned CopySrcReg = 0;
1019  unsigned CopyDstReg = 0;
1020  unsigned CopySrcSub = 0;
1021  unsigned CopyDstSub = 0;
1022  if (MI.isCopy()) {
1023  CopyDstReg = MI.getOperand(0).getReg();
1024  CopySrcReg = MI.getOperand(1).getReg();
1025  CopyDstSub = MI.getOperand(0).getSubReg();
1026  CopySrcSub = MI.getOperand(1).getSubReg();
1027  }
1028 
1029  // Track registers used by instruction.
1030  UsedInInstr.clear();
1031 
1032  // First scan.
1033  // Mark physreg uses and early clobbers as used.
1034  // Find the end of the virtreg operands
1035  unsigned VirtOpEnd = 0;
1036  bool hasTiedOps = false;
1037  bool hasEarlyClobbers = false;
1038  bool hasPartialRedefs = false;
1039  bool hasPhysDefs = false;
1040  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
1041  MachineOperand &MO = MI.getOperand(i);
1042  // Make sure MRI knows about registers clobbered by regmasks.
1043  if (MO.isRegMask()) {
1044  MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
1045  continue;
1046  }
1047  if (!MO.isReg()) continue;
1048  unsigned Reg = MO.getReg();
1049  if (!Reg) continue;
1050  if (TargetRegisterInfo::isVirtualRegister(Reg)) {
1051  VirtOpEnd = i+1;
1052  if (MO.isUse()) {
1053  hasTiedOps = hasTiedOps ||
1054  MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
1055  } else {
1056  if (MO.isEarlyClobber())
1057  hasEarlyClobbers = true;
1058  if (MO.getSubReg() && MI.readsVirtualRegister(Reg))
1059  hasPartialRedefs = true;
1060  }
1061  continue;
1062  }
1063  if (!MRI->isAllocatable(Reg)) continue;
1064  if (MO.isUse()) {
1065  usePhysReg(MO);
1066  } else if (MO.isEarlyClobber()) {
1067  definePhysReg(MI, Reg,
1068  (MO.isImplicit() || MO.isDead()) ? regFree : regReserved);
1069  hasEarlyClobbers = true;
1070  } else
1071  hasPhysDefs = true;
1072  }
1073 
1074  // The instruction may have virtual register operands that must be allocated
1075  // the same register at use-time and def-time: early clobbers and tied
1076  // operands. If there are also physical defs, these registers must avoid
1077  // both physical defs and uses, making them more constrained than normal
1078  // operands.
1079  // Similarly, if there are multiple defs and tied operands, we must make
1080  // sure the same register is allocated to uses and defs.
1081  // We didn't detect inline asm tied operands above, so just make this extra
1082  // pass for all inline asm.
1083  if (MI.isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
1084  (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
1085  handleThroughOperands(MI, VirtDead);
1086  // Don't attempt coalescing when we have funny stuff going on.
1087  CopyDstReg = 0;
1088  // Pretend we have early clobbers so the use operands get marked below.
1089  // This is not necessary for the common case of a single tied use.
1090  hasEarlyClobbers = true;
1091  }
1092 
1093  // Second scan.
1094  // Allocate virtreg uses.
1095  bool HasUndefUse = false;
1096  for (unsigned I = 0; I != VirtOpEnd; ++I) {
1097  MachineOperand &MO = MI.getOperand(I);
1098  if (!MO.isReg()) continue;
1099  unsigned Reg = MO.getReg();
1100  if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
1101  if (MO.isUse()) {
1102  if (MO.isUndef()) {
1103  HasUndefUse = true;
1104  // There is no need to allocate a register for an undef use.
1105  continue;
1106  }
1107 
1108  // Populate MayLiveAcrossBlocks in case the use block is allocated before
1109  // the def block (removing the vreg uses).
1110  mayLiveIn(Reg);
1111 
1112  LiveReg &LR = reloadVirtReg(MI, I, Reg, CopyDstReg);
1113  MCPhysReg PhysReg = LR.PhysReg;
1114  CopySrcReg = (CopySrcReg == Reg || CopySrcReg == PhysReg) ? PhysReg : 0;
1115  if (setPhysReg(MI, MO, PhysReg))
1116  killVirtReg(LR);
1117  }
1118  }
1119 
1120  // Allocate undef operands. This is a separate step because in a situation
1121  // like ` = OP undef %X, %X` both operands need the same register assigned,
1122  // so we should perform the normal assignment first.
1123  if (HasUndefUse) {
1124  for (MachineOperand &MO : MI.uses()) {
1125  if (!MO.isReg() || !MO.isUse())
1126  continue;
1127  unsigned Reg = MO.getReg();
1128  if (!TargetRegisterInfo::isVirtualRegister(Reg))
1129  continue;
1130 
1131  assert(MO.isUndef() && "Should only have undef virtreg uses left");
1132  allocVirtRegUndef(MO);
1133  }
1134  }
1135 
1136  // Track registers defined by instruction - early clobbers and tied uses at
1137  // this point.
1138  UsedInInstr.clear();
1139  if (hasEarlyClobbers) {
1140  for (const MachineOperand &MO : MI.operands()) {
1141  if (!MO.isReg()) continue;
1142  unsigned Reg = MO.getReg();
1143  if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
1144  // Look for physreg defs and tied uses.
1145  if (!MO.isDef() && !MO.isTied()) continue;
1146  markRegUsedInInstr(Reg);
1147  }
1148  }
1149 
1150  unsigned DefOpEnd = MI.getNumOperands();
1151  if (MI.isCall()) {
1152  // Spill all virtregs before a call. This serves one purpose: If an
1153  // exception is thrown, the landing pad is going to expect to find
1154  // registers in their spill slots.
1155  // Note: although it is appealing to just consider all definitions
1156  // as call-clobbered, this is not correct because some of those
1157  // definitions may be used later on and we do not want to reuse
1158  // those for virtual registers in between.
1159  LLVM_DEBUG(dbgs() << " Spilling remaining registers before call.\n");
1160  spillAll(MI, /*OnlyLiveOut*/ false);
1161  }
1162 
1163  // Third scan.
1164  // Mark all physreg defs as used before allocating virtreg defs.
1165  for (unsigned I = 0; I != DefOpEnd; ++I) {
1166  const MachineOperand &MO = MI.getOperand(I);
1167  if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
1168  continue;
1169  unsigned Reg = MO.getReg();
1170 
1171  if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg) ||
1172  !MRI->isAllocatable(Reg))
1173  continue;
1174  definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
1175  }
1176 
1177  // Fourth scan.
1178  // Allocate defs and collect dead defs.
1179  for (unsigned I = 0; I != DefOpEnd; ++I) {
1180  const MachineOperand &MO = MI.getOperand(I);
1181  if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
1182  continue;
1183  unsigned Reg = MO.getReg();
1184 
1185  // We have already dealt with phys regs in the previous scan.
1186  if (TargetRegisterInfo::isPhysicalRegister(Reg))
1187  continue;
1188  MCPhysReg PhysReg = defineVirtReg(MI, I, Reg, CopySrcReg);
1189  if (setPhysReg(MI, MI.getOperand(I), PhysReg)) {
1190  VirtDead.push_back(Reg);
1191  CopyDstReg = 0; // cancel coalescing;
1192  } else
1193  CopyDstReg = (CopyDstReg == Reg || CopyDstReg == PhysReg) ? PhysReg : 0;
1194  }
1195 
1196  // Kill dead defs after the scan to ensure that multiple defs of the same
1197  // register are allocated identically. We didn't need to do this for uses
1198  // because we are creating our own kill flags, and they are always at the
1199  // last use.
1200  for (unsigned VirtReg : VirtDead)
1201  killVirtReg(VirtReg);
1202  VirtDead.clear();
1203 
1204  LLVM_DEBUG(dbgs() << "<< " << MI);
1205  if (CopyDstReg && CopyDstReg == CopySrcReg && CopyDstSub == CopySrcSub) {
1206  LLVM_DEBUG(dbgs() << "Mark identity copy for removal\n");
1207  Coalesced.push_back(&MI);
1208  }
1209 }
1210 
1211 void RegAllocFast::handleDebugValue(MachineInstr &MI) {
1212  MachineOperand &MO = MI.getOperand(0);
1213 
1214  // Ignore DBG_VALUEs that aren't based on virtual registers. These are
1215  // mostly constants and frame indices.
1216  if (!MO.isReg())
1217  return;
1218  unsigned Reg = MO.getReg();
1219  if (!TargetRegisterInfo::isVirtualRegister(Reg))
1220  return;
1221 
1222  // See if this virtual register has already been allocated to a physical
1223  // register or spilled to a stack slot.
1224  LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
1225  if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
1226  setPhysReg(MI, MO, LRI->PhysReg);
1227  } else {
1228  int SS = StackSlotForVirtReg[Reg];
1229  if (SS != -1) {
1230  // Modify DBG_VALUE now that the value is in a spill slot.
1231  updateDbgValueForSpill(MI, SS);
1232  LLVM_DEBUG(dbgs() << "Modifying debug info due to spill:" << "\t" << MI);
1233  return;
1234  }
1235 
1236  // We can't allocate a physreg for a DebugValue, sorry!
1237  LLVM_DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
1238  MO.setReg(0);
1239  }
1240 
1241  // If Reg hasn't been spilled, put this DBG_VALUE in LiveDbgValueMap so
1242  // that future spills of Reg will have DBG_VALUEs.
1243  LiveDbgValueMap[Reg].push_back(&MI);
1244 }
1245 
1246 void RegAllocFast::allocateBasicBlock(MachineBasicBlock &MBB) {
1247  this->MBB = &MBB;
1248  LLVM_DEBUG(dbgs() << "\nAllocating " << MBB);
1249 
1250  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
1251  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");
1252 
1253  MachineBasicBlock::iterator MII = MBB.begin();
1254 
1255  // Add live-in registers as live.
1256  for (const MachineBasicBlock::RegisterMaskPair LI : MBB.liveins())
1257  if (MRI->isAllocatable(LI.PhysReg))
1258  definePhysReg(MII, LI.PhysReg, regReserved);
1259 
1260  VirtDead.clear();
1261  Coalesced.clear();
1262 
1263  // Sequentially allocate each instruction in the MBB.
1264  for (MachineInstr &MI : MBB) {
1265  LLVM_DEBUG(
1266  dbgs() << "\n>> " << MI << "Regs:";
1267  dumpState()
1268  );
1269 
1270  // Special handling for debug values. Note that they are not allowed to
1271  // affect codegen of the other instructions in any way.
1272  if (MI.isDebugValue()) {
1273  handleDebugValue(MI);
1274  continue;
1275  }
1276 
1277  allocateInstruction(MI);
1278  }
1279 
1280  // Spill all physical registers holding virtual registers now.
1281  LLVM_DEBUG(dbgs() << "Spilling live registers at end of block.\n");
1282  spillAll(MBB.getFirstTerminator(), /*OnlyLiveOut*/ true);
1283 
1284  // Erase all the coalesced copies. We are delaying it until now because
1285  // LiveVirtRegs might refer to the instrs.
1286  for (MachineInstr *MI : Coalesced)
1287  MBB.erase(MI);
1288  NumCoalesced += Coalesced.size();
1289 
1290  LLVM_DEBUG(MBB.dump());
1291 }
1292 
1293 bool RegAllocFast::runOnMachineFunction(MachineFunction &MF) {
1294  LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
1295  << "********** Function: " << MF.getName() << '\n');
1296  MRI = &MF.getRegInfo();
1297  const TargetSubtargetInfo &STI = MF.getSubtarget();
1298  TRI = STI.getRegisterInfo();
1299  TII = STI.getInstrInfo();
1300  MFI = &MF.getFrameInfo();
1301  MRI->freezeReservedRegs(MF);
1302  RegClassInfo.runOnMachineFunction(MF);
1303  UsedInInstr.clear();
1304  UsedInInstr.setUniverse(TRI->getNumRegUnits());
1305 
1306  // Initialize the virtual->physical register map to have a 'null'
1307  // mapping for all virtual registers.
1308  unsigned NumVirtRegs = MRI->getNumVirtRegs();
1309  StackSlotForVirtReg.resize(NumVirtRegs);
1310  LiveVirtRegs.setUniverse(NumVirtRegs);
1311  MayLiveAcrossBlocks.clear();
1312  MayLiveAcrossBlocks.resize(NumVirtRegs);
1313 
1314  // Loop over all of the basic blocks, eliminating virtual register references
1315  for (MachineBasicBlock &MBB : MF)
1316  allocateBasicBlock(MBB);
1317 
1318  // All machine operands and other references to virtual registers have been
1319  // replaced. Remove the virtual registers.
1320  MRI->clearVirtRegs();
1321 
1322  StackSlotForVirtReg.clear();
1323  LiveDbgValueMap.clear();
1324  return true;
1325 }
1326 
1327 FunctionPass *llvm::createFastRegisterAllocator() {
1328  return new RegAllocFast();
1329 }