LLVM  7.0.0svn
RegAllocFast.cpp
1 //===- RegAllocFast.cpp - A fast register allocator for debug code --------===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 /// \file This register allocator allocates registers to a basic block at a
11 /// time, attempting to keep values in registers and reusing registers as
12 /// appropriate.
13 //
14 //===----------------------------------------------------------------------===//
15 
16 #include "llvm/ADT/ArrayRef.h"
17 #include "llvm/ADT/DenseMap.h"
18 #include "llvm/ADT/IndexedMap.h"
19 #include "llvm/ADT/SmallSet.h"
20 #include "llvm/ADT/SmallVector.h"
21 #include "llvm/ADT/SparseSet.h"
22 #include "llvm/ADT/Statistic.h"
37 #include "llvm/IR/DebugLoc.h"
38 #include "llvm/IR/Metadata.h"
39 #include "llvm/MC/MCInstrDesc.h"
40 #include "llvm/MC/MCRegisterInfo.h"
41 #include "llvm/Pass.h"
42 #include "llvm/Support/Casting.h"
43 #include "llvm/Support/Compiler.h"
44 #include "llvm/Support/Debug.h"
47 #include <cassert>
48 #include <tuple>
49 #include <vector>
50 
51 using namespace llvm;
52 
53 #define DEBUG_TYPE "regalloc"
54 
55 STATISTIC(NumStores, "Number of stores added");
56 STATISTIC(NumLoads , "Number of loads added");
57 STATISTIC(NumCopies, "Number of copies coalesced");
58 
59 static RegisterRegAlloc
60  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);
61 
62 namespace {
63 
64  class RegAllocFast : public MachineFunctionPass {
65  public:
66  static char ID;
67 
68  RegAllocFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1) {}
69 
70  private:
71  MachineFrameInfo *MFI;
72  MachineRegisterInfo *MRI;
73  const TargetRegisterInfo *TRI;
74  const TargetInstrInfo *TII;
75  RegisterClassInfo RegClassInfo;
76 
77  /// Basic block currently being allocated.
78  MachineBasicBlock *MBB;
79 
80  /// Maps virtual regs to the frame index where these values are spilled.
81  IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;
82 
83  /// Everything we know about a live virtual register.
84  struct LiveReg {
85  MachineInstr *LastUse = nullptr; ///< Last instr to use reg.
86  unsigned VirtReg; ///< Virtual register number.
87  MCPhysReg PhysReg = 0; ///< Currently held here.
88  unsigned short LastOpNum = 0; ///< OpNum on LastUse.
89  bool Dirty = false; ///< Register needs spill.
90 
91  explicit LiveReg(unsigned v) : VirtReg(v) {}
92 
93  unsigned getSparseSetIndex() const {
94  return TargetRegisterInfo::virtReg2Index(VirtReg);
95  }
96  };
97 
98  using LiveRegMap = SparseSet<LiveReg>;
99 
100  /// This map contains entries for each virtual register that is currently
101  /// available in a physical register.
102  LiveRegMap LiveVirtRegs;
103 
104  DenseMap<unsigned, SmallVector<MachineInstr *, 4>> LiveDbgValueMap;
105 
106  /// Track the state of a physical register.
107  enum RegState {
108  /// A disabled register is not available for allocation, but an alias may
109  /// be in use. A register can only be moved out of the disabled state if
110  /// all aliases are disabled.
111  regDisabled,
112 
113  /// A free register is not currently in use and can be allocated
114  /// immediately without checking aliases.
115  regFree,
116 
117  /// A reserved register has been assigned explicitly (e.g., setting up a
118  /// call parameter), and it remains reserved until it is used.
119  regReserved
120 
121  /// A register state may also be a virtual register number, indicating
122  /// that the physical register is currently allocated to a virtual
123  /// register. In that case, LiveVirtRegs contains the inverse mapping.
124  };
125 
126  /// One of the RegState enums, or a virtreg.
127  std::vector<unsigned> PhysRegState;
128 
129  SmallVector<unsigned, 16> VirtDead;
130  SmallVector<MachineInstr *, 32> Coalesced;
131 
132  /// Set of register units.
133  using UsedInInstrSet = SparseSet<unsigned>;
134 
135  /// Set of register units that are used in the current instruction, and so
136  /// cannot be allocated.
137  UsedInInstrSet UsedInInstr;
138 
139  /// Mark a physreg as used in this instruction.
140  void markRegUsedInInstr(MCPhysReg PhysReg) {
141  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
142  UsedInInstr.insert(*Units);
143  }
144 
145  /// Check if a physreg or any of its aliases are used in this instruction.
146  bool isRegUsedInInstr(MCPhysReg PhysReg) const {
147  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
148  if (UsedInInstr.count(*Units))
149  return true;
150  return false;
151  }
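// Note: both helpers above work on register units rather than whole registers.
// Overlapping registers share register units, so inserting the units of a
// physreg into UsedInInstr implicitly covers all of its aliases; no separate
// alias walk is needed when checking whether a register was already used.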
152 
153  /// This flag is set when LiveRegMap will be cleared completely after
154  /// spilling all live registers. LiveRegMap entries should not be erased.
155  bool isBulkSpilling = false;
156 
157  enum : unsigned {
158  spillClean = 1,
159  spillDirty = 100,
160  spillImpossible = ~0u
161  };
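// The spill-cost scale above is deliberately coarse: reusing a clean register
// costs spillClean (1), evicting a dirty register costs spillDirty (100), and
// spillImpossible marks registers that cannot be touched at all. allocVirtReg()
// below only honors an allocation hint when its cost is below spillDirty, i.e.
// when taking the hint never forces a store to be emitted.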
162 
163  public:
164  StringRef getPassName() const override { return "Fast Register Allocator"; }
165 
166  void getAnalysisUsage(AnalysisUsage &AU) const override {
167  AU.setPreservesCFG();
168  MachineFunctionPass::getAnalysisUsage(AU);
169  }
170 
171  MachineFunctionProperties getRequiredProperties() const override {
172  return MachineFunctionProperties().set(
173  MachineFunctionProperties::Property::NoPHIs);
174  }
175 
176  MachineFunctionProperties getSetProperties() const override {
177  return MachineFunctionProperties().set(
178  MachineFunctionProperties::Property::NoVRegs);
179  }
180 
181  private:
182  bool runOnMachineFunction(MachineFunction &Fn) override;
183  void allocateBasicBlock(MachineBasicBlock &MBB);
184  void handleThroughOperands(MachineInstr &MI,
185  SmallVectorImpl<unsigned> &VirtDead);
186  int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass &RC);
187  bool isLastUseOfLocalReg(const MachineOperand &MO) const;
188 
189  void addKillFlag(const LiveReg &LRI);
190  void killVirtReg(LiveRegMap::iterator LRI);
191  void killVirtReg(unsigned VirtReg);
192  void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
193  void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);
194 
195  void usePhysReg(MachineOperand &MO);
196  void definePhysReg(MachineBasicBlock::iterator MI, MCPhysReg PhysReg,
197  RegState NewState);
198  unsigned calcSpillCost(MCPhysReg PhysReg) const;
199  void assignVirtToPhysReg(LiveReg &, MCPhysReg PhysReg);
200 
201  LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
202  return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
203  }
204 
205  LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
206  return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
207  }
208 
209  LiveRegMap::iterator assignVirtToPhysReg(unsigned VReg, MCPhysReg PhysReg);
210  LiveRegMap::iterator allocVirtReg(MachineInstr &MI, LiveRegMap::iterator,
211  unsigned Hint);
212  LiveRegMap::iterator defineVirtReg(MachineInstr &MI, unsigned OpNum,
213  unsigned VirtReg, unsigned Hint);
214  LiveRegMap::iterator reloadVirtReg(MachineInstr &MI, unsigned OpNum,
215  unsigned VirtReg, unsigned Hint);
216  void spillAll(MachineBasicBlock::iterator MI);
217  bool setPhysReg(MachineInstr &MI, unsigned OpNum, MCPhysReg PhysReg);
218 
219  void dumpState();
220  };
221 
222 } // end anonymous namespace
223 
224 char RegAllocFast::ID = 0;
225 
226 INITIALIZE_PASS(RegAllocFast, "regallocfast", "Fast Register Allocator", false,
227  false)
228 
229 /// This allocates space for the specified virtual register to be held on the
230 /// stack.
231 int RegAllocFast::getStackSpaceFor(unsigned VirtReg,
232  const TargetRegisterClass &RC) {
233  // Find the location Reg would belong...
234  int SS = StackSlotForVirtReg[VirtReg];
235  // Already has space allocated?
236  if (SS != -1)
237  return SS;
238 
239  // Allocate a new stack object for this spill location...
240  unsigned Size = TRI->getSpillSize(RC);
241  unsigned Align = TRI->getSpillAlignment(RC);
242  int FrameIdx = MFI->CreateSpillStackObject(Size, Align);
243 
244  // Assign the slot.
245  StackSlotForVirtReg[VirtReg] = FrameIdx;
246  return FrameIdx;
247 }
248 
249 /// Return true if MO is the only remaining reference to its virtual register,
250 /// and it is guaranteed to be a block-local register.
251 bool RegAllocFast::isLastUseOfLocalReg(const MachineOperand &MO) const {
252  // If the register has ever been spilled or reloaded, we conservatively assume
253  // it is a global register used in multiple blocks.
254  if (StackSlotForVirtReg[MO.getReg()] != -1)
255  return false;
256 
257  // Check that the use/def chain has exactly one operand - MO.
258  MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
259  if (&*I != &MO)
260  return false;
261  return ++I == MRI->reg_nodbg_end();
262 }
263 
264 /// Set kill flags on last use of a virtual register.
265 void RegAllocFast::addKillFlag(const LiveReg &LR) {
266  if (!LR.LastUse) return;
267  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
268  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
269  if (MO.getReg() == LR.PhysReg)
270  MO.setIsKill();
271  // else, don't do anything; we are probably redefining a
272  // subreg of this register and given we don't track which
273  // lanes are actually dead, we cannot insert a kill flag here.
274  // Otherwise we may end up in a situation like this:
275  // ... = (MO) physreg:sub1, implicit killed physreg
276  // ... <== Here we would allow later pass to reuse physreg:sub1
277  // which is potentially wrong.
278  // LR:sub0 = ...
279  // ... = LR.sub1 <== This is going to use physreg:sub1
280  }
281 }
282 
283 /// Mark virtreg as no longer available.
284 void RegAllocFast::killVirtReg(LiveRegMap::iterator LRI) {
285  addKillFlag(*LRI);
286  assert(PhysRegState[LRI->PhysReg] == LRI->VirtReg &&
287  "Broken RegState mapping");
288  PhysRegState[LRI->PhysReg] = regFree;
289  // Erase from LiveVirtRegs unless we're spilling in bulk.
290  if (!isBulkSpilling)
291  LiveVirtRegs.erase(LRI);
292 }
293 
294 /// Mark virtreg as no longer available.
295 void RegAllocFast::killVirtReg(unsigned VirtReg) {
296  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
297  "killVirtReg needs a virtual register");
298  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
299  if (LRI != LiveVirtRegs.end())
300  killVirtReg(LRI);
301 }
302 
303 /// This method spills the value specified by VirtReg into the corresponding
304 /// stack slot if needed.
305 void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI,
306  unsigned VirtReg) {
307  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
308  "Spilling a physical register is illegal!");
309  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
310  assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
311  spillVirtReg(MI, LRI);
312 }
313 
314 /// Do the actual work of spilling.
315 void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI,
316  LiveRegMap::iterator LRI) {
317  LiveReg &LR = *LRI;
318  assert(PhysRegState[LR.PhysReg] == LRI->VirtReg && "Broken RegState mapping");
319 
320  if (LR.Dirty) {
321  // If this physreg is used by the instruction, we want to kill it on the
322  // instruction, not on the spill.
323  bool SpillKill = MachineBasicBlock::iterator(LR.LastUse) != MI;
324  LR.Dirty = false;
325  DEBUG(dbgs() << "Spilling " << printReg(LRI->VirtReg, TRI)
326  << " in " << printReg(LR.PhysReg, TRI));
327  const TargetRegisterClass &RC = *MRI->getRegClass(LRI->VirtReg);
328  int FI = getStackSpaceFor(LRI->VirtReg, RC);
329  DEBUG(dbgs() << " to stack slot #" << FI << "\n");
330  TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, &RC, TRI);
331  ++NumStores; // Update statistics
332 
333  // If this register is used by a DBG_VALUE then insert a new DBG_VALUE to
334  // identify the spilled location as the place to find the corresponding
335  // variable's value.
336  SmallVectorImpl<MachineInstr *> &LRIDbgValues =
337  LiveDbgValueMap[LRI->VirtReg];
338  for (MachineInstr *DBG : LRIDbgValues) {
339  MachineInstr *NewDV = buildDbgValueForSpill(*MBB, MI, *DBG, FI);
340  assert(NewDV->getParent() == MBB && "dangling parent pointer");
341  (void)NewDV;
342  DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
343  }
344  // Now that this register is spilled, there should not be any DBG_VALUE
345  // pointing to this register, because they all point to the spilled value
346  // now.
347  LRIDbgValues.clear();
348  if (SpillKill)
349  LR.LastUse = nullptr; // Don't kill register again
350  }
351  killVirtReg(LRI);
352 }
353 
354 /// Spill all dirty virtregs without killing them.
355 void RegAllocFast::spillAll(MachineBasicBlock::iterator MI) {
356  if (LiveVirtRegs.empty()) return;
357  isBulkSpilling = true;
358  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
359  // of spilling here is deterministic, if arbitrary.
360  for (LiveRegMap::iterator I = LiveVirtRegs.begin(), E = LiveVirtRegs.end();
361  I != E; ++I)
362  spillVirtReg(MI, I);
363  LiveVirtRegs.clear();
364  isBulkSpilling = false;
365 }
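// spillAll() is invoked in two situations: before every call instruction, so
// that values are in their stack slots where a landing pad expects to find
// them, and at the end of each basic block, because this allocator never keeps
// values in registers across block boundaries.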
366 
367 /// Handle the direct use of a physical register. Check that the register is
368 /// not used by a virtreg. Kill the physreg, marking it free. This may add
369 /// implicit kills to MO->getParent() and invalidate MO.
370 void RegAllocFast::usePhysReg(MachineOperand &MO) {
371  // Ignore undef uses.
372  if (MO.isUndef())
373  return;
374 
375  unsigned PhysReg = MO.getReg();
376  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
377  "Bad usePhysReg operand");
378 
379  markRegUsedInInstr(PhysReg);
380  switch (PhysRegState[PhysReg]) {
381  case regDisabled:
382  break;
383  case regReserved:
384  PhysRegState[PhysReg] = regFree;
385  LLVM_FALLTHROUGH;
386  case regFree:
387  MO.setIsKill();
388  return;
389  default:
390  // The physreg was allocated to a virtual register. That means the value we
391  // wanted has been clobbered.
392  llvm_unreachable("Instruction uses an allocated register");
393  }
394 
395  // Maybe a superregister is reserved?
396  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
397  MCPhysReg Alias = *AI;
398  switch (PhysRegState[Alias]) {
399  case regDisabled:
400  break;
401  case regReserved:
402  // Either PhysReg is a subregister of Alias and we mark the
403  // whole register as free, or PhysReg is the superregister of
404  // Alias and we mark all the aliases as disabled before freeing
405  // PhysReg.
406  // In the latter case, since PhysReg was disabled, this means that
407  // its value is defined only by physical sub-registers. This check
408  // is performed by the assert of the default case in this loop.
409  // Note: The value of the superregister may only be partially
410  // defined, which is why regDisabled is a valid state for aliases.
411  assert((TRI->isSuperRegister(PhysReg, Alias) ||
412  TRI->isSuperRegister(Alias, PhysReg)) &&
413  "Instruction is not using a subregister of a reserved register");
414  LLVM_FALLTHROUGH;
415  case regFree:
416  if (TRI->isSuperRegister(PhysReg, Alias)) {
417  // Leave the superregister in the working set.
418  PhysRegState[Alias] = regFree;
419  MO.getParent()->addRegisterKilled(Alias, TRI, true);
420  return;
421  }
422  // Some other alias was in the working set - clear it.
423  PhysRegState[Alias] = regDisabled;
424  break;
425  default:
426  llvm_unreachable("Instruction uses an alias of an allocated register");
427  }
428  }
429 
430  // All aliases are disabled, bring register into working set.
431  PhysRegState[PhysReg] = regFree;
432  MO.setIsKill();
433 }
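// Illustration of the alias handling above: when an instruction directly uses
// a sub-register while a reserved or free super-register is in the working
// set, the super-register is marked regFree and an implicit kill of it is
// added, so later instructions may reallocate the full register; any other
// overlapping alias is simply moved to the regDisabled state.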
434 
435 /// Mark PhysReg as reserved or free after spilling any virtregs. This is very
436 /// similar to defineVirtReg except the physreg is reserved instead of
437 /// allocated.
438 void RegAllocFast::definePhysReg(MachineBasicBlock::iterator MI,
439  MCPhysReg PhysReg, RegState NewState) {
440  markRegUsedInInstr(PhysReg);
441  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
442  case regDisabled:
443  break;
444  default:
445  spillVirtReg(MI, VirtReg);
446  LLVM_FALLTHROUGH;
447  case regFree:
448  case regReserved:
449  PhysRegState[PhysReg] = NewState;
450  return;
451  }
452 
453  // This is a disabled register, disable all aliases.
454  PhysRegState[PhysReg] = NewState;
455  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
456  MCPhysReg Alias = *AI;
457  switch (unsigned VirtReg = PhysRegState[Alias]) {
458  case regDisabled:
459  break;
460  default:
461  spillVirtReg(MI, VirtReg);
462  LLVM_FALLTHROUGH;
463  case regFree:
464  case regReserved:
465  PhysRegState[Alias] = regDisabled;
466  if (TRI->isSuperRegister(PhysReg, Alias))
467  return;
468  break;
469  }
470  }
471 }
472 
473 /// \brief Return the cost of spilling/clearing out PhysReg and its aliases so that it is
474 /// free for allocation. Returns 0 when PhysReg is free or disabled with all
475 /// aliases disabled - it can be allocated directly.
476 /// \returns spillImpossible when PhysReg or an alias can't be spilled.
477 unsigned RegAllocFast::calcSpillCost(MCPhysReg PhysReg) const {
478  if (isRegUsedInInstr(PhysReg)) {
479  DEBUG(dbgs() << printReg(PhysReg, TRI) << " is already used in instr.\n");
480  return spillImpossible;
481  }
482  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
483  case regDisabled:
484  break;
485  case regFree:
486  return 0;
487  case regReserved:
488  DEBUG(dbgs() << printReg(VirtReg, TRI) << " corresponding "
489  << printReg(PhysReg, TRI) << " is reserved already.\n");
490  return spillImpossible;
491  default: {
492  LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
493  assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
494  return I->Dirty ? spillDirty : spillClean;
495  }
496  }
497 
498  // This is a disabled register, add up cost of aliases.
499  DEBUG(dbgs() << printReg(PhysReg, TRI) << " is disabled.\n");
500  unsigned Cost = 0;
501  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
502  MCPhysReg Alias = *AI;
503  switch (unsigned VirtReg = PhysRegState[Alias]) {
504  case regDisabled:
505  break;
506  case regFree:
507  ++Cost;
508  break;
509  case regReserved:
510  return spillImpossible;
511  default: {
512  LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
513  assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
514  Cost += I->Dirty ? spillDirty : spillClean;
515  break;
516  }
517  }
518  }
519  return Cost;
520 }
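// Worked example (hypothetical): for a disabled PhysReg whose aliases hold one
// dirty virtreg and one clean virtreg, the result is spillDirty + spillClean =
// 101; each merely free alias adds 1, and only a register with all aliases
// disabled comes back as cost 0 (see the "Cost is 0" note in allocVirtReg()).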
521 
522 /// \brief This method updates local state so that we know that PhysReg is the
523 /// proper container for VirtReg now. The physical register must not be used
524 /// for anything else when this is called.
525 void RegAllocFast::assignVirtToPhysReg(LiveReg &LR, MCPhysReg PhysReg) {
526  DEBUG(dbgs() << "Assigning " << printReg(LR.VirtReg, TRI) << " to "
527  << printReg(PhysReg, TRI) << "\n");
528  PhysRegState[PhysReg] = LR.VirtReg;
529  assert(!LR.PhysReg && "Already assigned a physreg");
530  LR.PhysReg = PhysReg;
531 }
532 
533 RegAllocFast::LiveRegMap::iterator
534 RegAllocFast::assignVirtToPhysReg(unsigned VirtReg, MCPhysReg PhysReg) {
535  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
536  assert(LRI != LiveVirtRegs.end() && "VirtReg disappeared");
537  assignVirtToPhysReg(*LRI, PhysReg);
538  return LRI;
539 }
540 
541 /// Allocates a physical register for VirtReg.
542 RegAllocFast::LiveRegMap::iterator RegAllocFast::allocVirtReg(MachineInstr &MI,
543  LiveRegMap::iterator LRI, unsigned Hint) {
544  const unsigned VirtReg = LRI->VirtReg;
545 
546  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
547  "Can only allocate virtual registers");
548 
549  // Take hint when possible.
550  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
551  if (TargetRegisterInfo::isPhysicalRegister(Hint) &&
552  MRI->isAllocatable(Hint) && RC.contains(Hint)) {
553  // Ignore the hint if we would have to spill a dirty register.
554  unsigned Cost = calcSpillCost(Hint);
555  if (Cost < spillDirty) {
556  if (Cost)
557  definePhysReg(MI, Hint, regFree);
558  // definePhysReg may kill virtual registers and modify LiveVirtRegs.
559  // That invalidates LRI, so run a new lookup for VirtReg.
560  return assignVirtToPhysReg(VirtReg, Hint);
561  }
562  }
563 
564  // First try to find a completely free register.
565  ArrayRef<MCPhysReg> AO = RegClassInfo.getOrder(&RC);
566  for (MCPhysReg PhysReg : AO) {
567  if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
568  assignVirtToPhysReg(*LRI, PhysReg);
569  return LRI;
570  }
571  }
572 
573  DEBUG(dbgs() << "Allocating " << printReg(VirtReg) << " from "
574  << TRI->getRegClassName(&RC) << "\n");
575 
576  unsigned BestReg = 0;
577  unsigned BestCost = spillImpossible;
578  for (MCPhysReg PhysReg : AO) {
579  unsigned Cost = calcSpillCost(PhysReg);
580  DEBUG(dbgs() << "\tRegister: " << printReg(PhysReg, TRI) << "\n");
581  DEBUG(dbgs() << "\tCost: " << Cost << "\n");
582  DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n");
583  // Cost is 0 when all aliases are already disabled.
584  if (Cost == 0) {
585  assignVirtToPhysReg(*LRI, PhysReg);
586  return LRI;
587  }
588  if (Cost < BestCost)
589  BestReg = PhysReg, BestCost = Cost;
590  }
591 
592  if (BestReg) {
593  definePhysReg(MI, BestReg, regFree);
594  // definePhysReg may kill virtual registers and modify LiveVirtRegs.
595  // That invalidates LRI, so run a new lookup for VirtReg.
596  return assignVirtToPhysReg(VirtReg, BestReg);
597  }
598 
599  // Nothing we can do. Report an error and keep going with a bad allocation.
600  if (MI.isInlineAsm())
601  MI.emitError("inline assembly requires more registers than available");
602  else
603  MI.emitError("ran out of registers during register allocation");
604  definePhysReg(MI, *AO.begin(), regFree);
605  return assignVirtToPhysReg(VirtReg, *AO.begin());
606 }
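// Allocation strategy used above, in order: (1) take the hint if it names an
// allocatable register of the right class and evicting it costs less than
// spillDirty; (2) take any completely free register from the class allocation
// order; (3) otherwise evict the candidate with the lowest spill cost; (4) if
// every candidate is spillImpossible, emit a diagnostic and reuse the first
// register of the allocation order so compilation can continue.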
607 
608 /// Allocates a register for VirtReg and mark it as dirty.
609 RegAllocFast::LiveRegMap::iterator RegAllocFast::defineVirtReg(MachineInstr &MI,
610  unsigned OpNum,
611  unsigned VirtReg,
612  unsigned Hint) {
613  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
614  "Not a virtual register");
615  LiveRegMap::iterator LRI;
616  bool New;
617  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
618  if (New) {
619  // If there is no hint, peek at the only use of this register.
620  if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
621  MRI->hasOneNonDBGUse(VirtReg)) {
622  const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg);
623  // It's a copy, use the destination register as a hint.
624  if (UseMI.isCopyLike())
625  Hint = UseMI.getOperand(0).getReg();
626  }
627  LRI = allocVirtReg(MI, LRI, Hint);
628  } else if (LRI->LastUse) {
629  // Redefining a live register - kill at the last use, unless it is this
630  // instruction defining VirtReg multiple times.
631  if (LRI->LastUse != &MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
632  addKillFlag(*LRI);
633  }
634  assert(LRI->PhysReg && "Register not assigned");
635  LRI->LastUse = &MI;
636  LRI->LastOpNum = OpNum;
637  LRI->Dirty = true;
638  markRegUsedInInstr(LRI->PhysReg);
639  return LRI;
640 }
641 
642 /// Make sure VirtReg is available in a physreg and return it.
643 RegAllocFast::LiveRegMap::iterator RegAllocFast::reloadVirtReg(MachineInstr &MI,
644  unsigned OpNum,
645  unsigned VirtReg,
646  unsigned Hint) {
647  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
648  "Not a virtual register");
649  LiveRegMap::iterator LRI;
650  bool New;
651  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
652  MachineOperand &MO = MI.getOperand(OpNum);
653  if (New) {
654  LRI = allocVirtReg(MI, LRI, Hint);
655  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
656  int FrameIndex = getStackSpaceFor(VirtReg, RC);
657  DEBUG(dbgs() << "Reloading " << printReg(VirtReg, TRI) << " into "
658  << printReg(LRI->PhysReg, TRI) << "\n");
659  TII->loadRegFromStackSlot(*MBB, MI, LRI->PhysReg, FrameIndex, &RC, TRI);
660  ++NumLoads;
661  } else if (LRI->Dirty) {
662  if (isLastUseOfLocalReg(MO)) {
663  DEBUG(dbgs() << "Killing last use: " << MO << "\n");
664  if (MO.isUse())
665  MO.setIsKill();
666  else
667  MO.setIsDead();
668  } else if (MO.isKill()) {
669  DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
670  MO.setIsKill(false);
671  } else if (MO.isDead()) {
672  DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
673  MO.setIsDead(false);
674  }
675  } else if (MO.isKill()) {
676  // We must remove kill flags from uses of reloaded registers because the
677  // register would be killed immediately, and there might be a second use:
678  // %foo = OR killed %x, %x
679  // This would cause a second reload of %x into a different register.
680  DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
681  MO.setIsKill(false);
682  } else if (MO.isDead()) {
683  DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
684  MO.setIsDead(false);
685  }
686  assert(LRI->PhysReg && "Register not assigned");
687  LRI->LastUse = &MI;
688  LRI->LastOpNum = OpNum;
689  markRegUsedInInstr(LRI->PhysReg);
690  return LRI;
691 }
692 
693 /// Changes operand OpNum in MI to refer to PhysReg, considering subregs. This
694 /// may invalidate any operand pointers. Return true if the operand kills its
695 /// register.
696 bool RegAllocFast::setPhysReg(MachineInstr &MI, unsigned OpNum,
697  MCPhysReg PhysReg) {
698  MachineOperand &MO = MI.getOperand(OpNum);
699  bool Dead = MO.isDead();
700  if (!MO.getSubReg()) {
701  MO.setReg(PhysReg);
702  MO.setIsRenamable(true);
703  return MO.isKill() || Dead;
704  }
705 
706  // Handle subregister index.
707  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
708  MO.setIsRenamable(true);
709  MO.setSubReg(0);
710 
711  // A kill flag implies killing the full register. Add corresponding super
712  // register kill.
713  if (MO.isKill()) {
714  MI.addRegisterKilled(PhysReg, TRI, true);
715  return true;
716  }
717 
718  // A <def,read-undef> of a sub-register requires an implicit def of the full
719  // register.
720  if (MO.isDef() && MO.isUndef())
721  MI.addRegisterDefined(PhysReg, TRI);
722 
723  return Dead;
724 }
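// Sketch of the subregister rewrite above (register names are hypothetical):
// an operand such as "%vreg1:sub_lo" assigned to physreg R2_R3 is rewritten to
// the physical sub-register TRI->getSubReg(R2_R3, sub_lo), the operand's
// subreg index is cleared, and kill/undef flags are widened to the full
// register via addRegisterKilled()/addRegisterDefined().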
725 
726 // Handles special instruction operands like early clobbers and tied ops when
727 // there are additional physreg defines.
728 void RegAllocFast::handleThroughOperands(MachineInstr &MI,
729  SmallVectorImpl<unsigned> &VirtDead) {
730  DEBUG(dbgs() << "Scanning for through registers:");
731  SmallSet<unsigned, 8> ThroughRegs;
732  for (const MachineOperand &MO : MI.operands()) {
733  if (!MO.isReg()) continue;
734  unsigned Reg = MO.getReg();
735  if (!TargetRegisterInfo::isVirtualRegister(Reg))
736  continue;
737  if (MO.isEarlyClobber() || (MO.isUse() && MO.isTied()) ||
738  (MO.getSubReg() && MI.readsVirtualRegister(Reg))) {
739  if (ThroughRegs.insert(Reg).second)
740  DEBUG(dbgs() << ' ' << printReg(Reg));
741  }
742  }
743 
744  // If any physreg defines collide with preallocated through registers,
745  // we must spill and reallocate.
746  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
747  for (const MachineOperand &MO : MI.operands()) {
748  if (!MO.isReg() || !MO.isDef()) continue;
749  unsigned Reg = MO.getReg();
750  if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
751  markRegUsedInInstr(Reg);
752  for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
753  if (ThroughRegs.count(PhysRegState[*AI]))
754  definePhysReg(MI, *AI, regFree);
755  }
756  }
757 
758  SmallVector<unsigned, 8> PartialDefs;
759  DEBUG(dbgs() << "Allocating tied uses.\n");
760  for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
761  const MachineOperand &MO = MI.getOperand(I);
762  if (!MO.isReg()) continue;
763  unsigned Reg = MO.getReg();
764  if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
765  if (MO.isUse()) {
766  if (!MO.isTied()) continue;
767  DEBUG(dbgs() << "Operand " << I << "("<< MO << ") is tied to operand "
768  << MI.findTiedOperandIdx(I) << ".\n");
769  LiveRegMap::iterator LRI = reloadVirtReg(MI, I, Reg, 0);
770  MCPhysReg PhysReg = LRI->PhysReg;
771  setPhysReg(MI, I, PhysReg);
772  // Note: we don't update the def operand yet. That would cause the normal
773  // def-scan to attempt spilling.
774  } else if (MO.getSubReg() && MI.readsVirtualRegister(Reg)) {
775  DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
776  // Reload the register, but don't assign to the operand just yet.
777  // That would confuse the later phys-def processing pass.
778  LiveRegMap::iterator LRI = reloadVirtReg(MI, I, Reg, 0);
779  PartialDefs.push_back(LRI->PhysReg);
780  }
781  }
782 
783  DEBUG(dbgs() << "Allocating early clobbers.\n");
784  for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
785  const MachineOperand &MO = MI.getOperand(I);
786  if (!MO.isReg()) continue;
787  unsigned Reg = MO.getReg();
788  if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
789  if (!MO.isEarlyClobber())
790  continue;
791  // Note: defineVirtReg may invalidate MO.
792  LiveRegMap::iterator LRI = defineVirtReg(MI, I, Reg, 0);
793  MCPhysReg PhysReg = LRI->PhysReg;
794  if (setPhysReg(MI, I, PhysReg))
795  VirtDead.push_back(Reg);
796  }
797 
798  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
799  UsedInInstr.clear();
800  for (const MachineOperand &MO : MI.operands()) {
801  if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
802  unsigned Reg = MO.getReg();
803  if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
804  DEBUG(dbgs() << "\tSetting " << printReg(Reg, TRI)
805  << " as used in instr\n");
806  markRegUsedInInstr(Reg);
807  }
808 
809  // Also mark PartialDefs as used to avoid reallocation.
810  for (unsigned PartialDef : PartialDefs)
811  markRegUsedInInstr(PartialDef);
812 }
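// Summary of the special-case handling above: "through" registers (early
// clobbers, tied uses, and partial redefines) are collected first, any
// colliding physreg defs are freed, tied uses and partial redefines are
// reloaded, early-clobber defs are allocated, and finally UsedInInstr is
// rebuilt so the regular use/def scans in allocateBasicBlock() start from a
// consistent state.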
813 
814 #ifndef NDEBUG
815 void RegAllocFast::dumpState() {
816  for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
817  if (PhysRegState[Reg] == regDisabled) continue;
818  dbgs() << " " << printReg(Reg, TRI);
819  switch(PhysRegState[Reg]) {
820  case regFree:
821  break;
822  case regReserved:
823  dbgs() << "*";
824  break;
825  default: {
826  dbgs() << '=' << printReg(PhysRegState[Reg]);
827  LiveRegMap::iterator I = findLiveVirtReg(PhysRegState[Reg]);
828  assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
829  if (I->Dirty)
830  dbgs() << "*";
831  assert(I->PhysReg == Reg && "Bad inverse map");
832  break;
833  }
834  }
835  }
836  dbgs() << '\n';
837  // Check that LiveVirtRegs is the inverse.
838  for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
839  e = LiveVirtRegs.end(); i != e; ++i) {
840  assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
841  "Bad map key");
842  assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
843  "Bad map value");
844  assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
845  }
846 }
847 #endif
848 
849 void RegAllocFast::allocateBasicBlock(MachineBasicBlock &MBB) {
850  this->MBB = &MBB;
851  DEBUG(dbgs() << "\nAllocating " << MBB);
852 
853  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
854  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");
855 
856  MachineBasicBlock::iterator MII = MBB.begin();
857 
858  // Add live-in registers as live.
859  for (const MachineBasicBlock::RegisterMaskPair LI : MBB.liveins())
860  if (MRI->isAllocatable(LI.PhysReg))
861  definePhysReg(MII, LI.PhysReg, regReserved);
862 
863  VirtDead.clear();
864  Coalesced.clear();
865 
866  // Otherwise, sequentially allocate each instruction in the MBB.
867  for (MachineInstr &MI : MBB) {
868  const MCInstrDesc &MCID = MI.getDesc();
869  DEBUG(
870  dbgs() << "\n>> " << MI << "Regs:";
871  dumpState()
872  );
873 
874  // Debug values are not allowed to change codegen in any way.
875  if (MI.isDebugValue()) {
876  MachineInstr *DebugMI = &MI;
877  MachineOperand &MO = DebugMI->getOperand(0);
878 
879  // Ignore DBG_VALUEs that aren't based on virtual registers. These are
880  // mostly constants and frame indices.
881  if (!MO.isReg())
882  continue;
883  unsigned Reg = MO.getReg();
884  if (!TargetRegisterInfo::isVirtualRegister(Reg))
885  continue;
886 
887  // See if this virtual register has already been allocated to a physical
888  // register or spilled to a stack slot.
889  LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
890  if (LRI != LiveVirtRegs.end())
891  setPhysReg(*DebugMI, 0, LRI->PhysReg);
892  else {
893  int SS = StackSlotForVirtReg[Reg];
894  if (SS != -1) {
895  // Modify DBG_VALUE now that the value is in a spill slot.
896  updateDbgValueForSpill(*DebugMI, SS);
897  DEBUG(dbgs() << "Modifying debug info due to spill:"
898  << "\t" << *DebugMI);
899  continue;
900  }
901 
902  // We can't allocate a physreg for a DebugValue, sorry!
903  DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
904  MO.setReg(0);
905  }
906 
907  // If Reg hasn't been spilled, put this DBG_VALUE in LiveDbgValueMap so
908  // that future spills of Reg will have DBG_VALUEs.
909  LiveDbgValueMap[Reg].push_back(DebugMI);
910  continue;
911  }
912 
913  // If this is a copy, we may be able to coalesce.
914  unsigned CopySrcReg = 0;
915  unsigned CopyDstReg = 0;
916  unsigned CopySrcSub = 0;
917  unsigned CopyDstSub = 0;
918  if (MI.isCopy()) {
919  CopyDstReg = MI.getOperand(0).getReg();
920  CopySrcReg = MI.getOperand(1).getReg();
921  CopyDstSub = MI.getOperand(0).getSubReg();
922  CopySrcSub = MI.getOperand(1).getSubReg();
923  }
924 
925  // Track registers used by instruction.
926  UsedInInstr.clear();
927 
928  // First scan.
929  // Mark physreg uses and early clobbers as used.
930  // Find the end of the virtreg operands
931  unsigned VirtOpEnd = 0;
932  bool hasTiedOps = false;
933  bool hasEarlyClobbers = false;
934  bool hasPartialRedefs = false;
935  bool hasPhysDefs = false;
936  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
937  MachineOperand &MO = MI.getOperand(i);
938  // Make sure MRI knows about registers clobbered by regmasks.
939  if (MO.isRegMask()) {
940  MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
941  continue;
942  }
943  if (!MO.isReg()) continue;
944  unsigned Reg = MO.getReg();
945  if (!Reg) continue;
946  if (TargetRegisterInfo::isVirtualRegister(Reg)) {
947  VirtOpEnd = i+1;
948  if (MO.isUse()) {
949  hasTiedOps = hasTiedOps ||
950  MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
951  } else {
952  if (MO.isEarlyClobber())
953  hasEarlyClobbers = true;
954  if (MO.getSubReg() && MI.readsVirtualRegister(Reg))
955  hasPartialRedefs = true;
956  }
957  continue;
958  }
959  if (!MRI->isAllocatable(Reg)) continue;
960  if (MO.isUse()) {
961  usePhysReg(MO);
962  } else if (MO.isEarlyClobber()) {
963  definePhysReg(MI, Reg,
964  (MO.isImplicit() || MO.isDead()) ? regFree : regReserved);
965  hasEarlyClobbers = true;
966  } else
967  hasPhysDefs = true;
968  }
969 
970  // The instruction may have virtual register operands that must be allocated
971  // the same register at use-time and def-time: early clobbers and tied
972  // operands. If there are also physical defs, these registers must avoid
973  // both physical defs and uses, making them more constrained than normal
974  // operands.
975  // Similarly, if there are multiple defs and tied operands, we must make
976  // sure the same register is allocated to uses and defs.
977  // We didn't detect inline asm tied operands above, so just make this extra
978  // pass for all inline asm.
979  if (MI.isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
980  (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
981  handleThroughOperands(MI, VirtDead);
982  // Don't attempt coalescing when we have funny stuff going on.
983  CopyDstReg = 0;
984  // Pretend we have early clobbers so the use operands get marked below.
985  // This is not necessary for the common case of a single tied use.
986  hasEarlyClobbers = true;
987  }
988 
989  // Second scan.
990  // Allocate virtreg uses.
991  for (unsigned I = 0; I != VirtOpEnd; ++I) {
992  const MachineOperand &MO = MI.getOperand(I);
993  if (!MO.isReg()) continue;
994  unsigned Reg = MO.getReg();
995  if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
996  if (MO.isUse()) {
997  LiveRegMap::iterator LRI = reloadVirtReg(MI, I, Reg, CopyDstReg);
998  MCPhysReg PhysReg = LRI->PhysReg;
999  CopySrcReg = (CopySrcReg == Reg || CopySrcReg == PhysReg) ? PhysReg : 0;
1000  if (setPhysReg(MI, I, PhysReg))
1001  killVirtReg(LRI);
1002  }
1003  }
1004 
1005  // Track registers defined by instruction - early clobbers and tied uses at
1006  // this point.
1007  UsedInInstr.clear();
1008  if (hasEarlyClobbers) {
1009  for (const MachineOperand &MO : MI.operands()) {
1010  if (!MO.isReg()) continue;
1011  unsigned Reg = MO.getReg();
1012  if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
1013  // Look for physreg defs and tied uses.
1014  if (!MO.isDef() && !MO.isTied()) continue;
1015  markRegUsedInInstr(Reg);
1016  }
1017  }
1018 
1019  unsigned DefOpEnd = MI.getNumOperands();
1020  if (MI.isCall()) {
1021  // Spill all virtregs before a call. This serves one purpose: If an
1022  // exception is thrown, the landing pad is going to expect to find
1023  // registers in their spill slots.
1024  // Note: although this is appealing to just consider all definitions
1025  // as call-clobbered, this is not correct because some of those
1026  // definitions may be used later on and we do not want to reuse
1027  // those for virtual registers in between.
1028  DEBUG(dbgs() << " Spilling remaining registers before call.\n");
1029  spillAll(MI);
1030  }
1031 
1032  // Third scan.
1033  // Allocate defs and collect dead defs.
1034  for (unsigned I = 0; I != DefOpEnd; ++I) {
1035  const MachineOperand &MO = MI.getOperand(I);
1036  if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
1037  continue;
1038  unsigned Reg = MO.getReg();
1039 
1040  if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
1041  if (!MRI->isAllocatable(Reg)) continue;
1042  definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
1043  continue;
1044  }
1045  LiveRegMap::iterator LRI = defineVirtReg(MI, I, Reg, CopySrcReg);
1046  MCPhysReg PhysReg = LRI->PhysReg;
1047  if (setPhysReg(MI, I, PhysReg)) {
1048  VirtDead.push_back(Reg);
1049  CopyDstReg = 0; // cancel coalescing;
1050  } else
1051  CopyDstReg = (CopyDstReg == Reg || CopyDstReg == PhysReg) ? PhysReg : 0;
1052  }
1053 
1054  // Kill dead defs after the scan to ensure that multiple defs of the same
1055  // register are allocated identically. We didn't need to do this for uses
1056  // because we are creating our own kill flags, and they are always at the
1057  // last use.
1058  for (unsigned VirtReg : VirtDead)
1059  killVirtReg(VirtReg);
1060  VirtDead.clear();
1061 
1062  if (CopyDstReg && CopyDstReg == CopySrcReg && CopyDstSub == CopySrcSub) {
1063  DEBUG(dbgs() << "-- coalescing: " << MI);
1064  Coalesced.push_back(&MI);
1065  } else {
1066  DEBUG(dbgs() << "<< " << MI);
1067  }
1068  }
1069 
1070  // Spill all physical registers holding virtual registers now.
1071  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
1072  spillAll(MBB.getFirstTerminator());
1073 
1074  // Erase all the coalesced copies. We are delaying it until now because
1075  // LiveVirtRegs might refer to the instrs.
1076  for (MachineInstr *MI : Coalesced)
1077  MBB.erase(MI);
1078  NumCopies += Coalesced.size();
1079 
1080  DEBUG(MBB.dump());
1081 }
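// Per-instruction flow in allocateBasicBlock(): the first scan records physreg
// uses and early clobbers, the second scan reloads and rewrites virtreg uses,
// calls force a spillAll(), the third scan allocates defs and collects dead
// defs, and trivial copies whose source and destination end up in the same
// physreg are queued in Coalesced and erased once the block has been processed.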
1082 
1083 /// Allocates registers for a function.
1084 bool RegAllocFast::runOnMachineFunction(MachineFunction &MF) {
1085  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
1086  << "********** Function: " << MF.getName() << '\n');
1087  MRI = &MF.getRegInfo();
1088  const TargetSubtargetInfo &STI = MF.getSubtarget();
1089  TRI = STI.getRegisterInfo();
1090  TII = STI.getInstrInfo();
1091  MFI = &MF.getFrameInfo();
1092  MRI->freezeReservedRegs(MF);
1093  RegClassInfo.runOnMachineFunction(MF);
1094  UsedInInstr.clear();
1095  UsedInInstr.setUniverse(TRI->getNumRegUnits());
1096 
1097  // initialize the virtual->physical register map to have a 'null'
1098  // mapping for all virtual registers
1099  unsigned NumVirtRegs = MRI->getNumVirtRegs();
1100  StackSlotForVirtReg.resize(NumVirtRegs);
1101  LiveVirtRegs.setUniverse(NumVirtRegs);
1102 
1103  // Loop over all of the basic blocks, eliminating virtual register references
1104  for (MachineBasicBlock &MBB : MF)
1105  allocateBasicBlock(MBB);
1106 
1107  // All machine operands and other references to virtual registers have been
1108  // replaced. Remove the virtual registers.
1109  MRI->clearVirtRegs();
1110 
1111  StackSlotForVirtReg.clear();
1112  LiveDbgValueMap.clear();
1113  return true;
1114 }
1115 
1116 FunctionPass *llvm::createFastRegisterAllocator() {
1117  return new RegAllocFast();
1118 }
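// Usage note: the fast allocator is the default register allocator for
// unoptimized (-O0) compilations, and it can be requested explicitly with
// llc's -regalloc=fast option, e.g. "llc -O0 -regalloc=fast input.ll".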