LCOV - code coverage report
Current view: top level - lib/CodeGen - RegisterScavenging.cpp (source / functions) Hit Total Coverage
Test: llvm-toolchain.info Lines: 233 257 90.7 %
Date: 2018-10-20 13:21:21 Functions: 21 24 87.5 %
Legend: Lines: hit not hit

          Line data    Source code
       1             : //===- RegisterScavenging.cpp - Machine register scavenging ---------------===//
       2             : //
       3             : //                     The LLVM Compiler Infrastructure
       4             : //
       5             : // This file is distributed under the University of Illinois Open Source
       6             : // License. See LICENSE.TXT for details.
       7             : //
       8             : //===----------------------------------------------------------------------===//
       9             : //
      10             : /// \file
      11             : /// This file implements the machine register scavenger. It can provide
      12             : /// information, such as unused registers, at any point in a machine basic
      13             : /// block. It also provides a mechanism to make registers available by evicting
      14             : /// them to spill slots.
      15             : //
      16             : //===----------------------------------------------------------------------===//
      17             : 
      18             : #include "llvm/CodeGen/RegisterScavenging.h"
      19             : #include "llvm/ADT/ArrayRef.h"
      20             : #include "llvm/ADT/BitVector.h"
      21             : #include "llvm/ADT/SmallVector.h"
      22             : #include "llvm/ADT/Statistic.h"
      23             : #include "llvm/CodeGen/LiveRegUnits.h"
      24             : #include "llvm/CodeGen/MachineBasicBlock.h"
      25             : #include "llvm/CodeGen/MachineFrameInfo.h"
      26             : #include "llvm/CodeGen/MachineFunction.h"
      27             : #include "llvm/CodeGen/MachineFunctionPass.h"
      28             : #include "llvm/CodeGen/MachineInstr.h"
      29             : #include "llvm/CodeGen/MachineOperand.h"
      30             : #include "llvm/CodeGen/MachineRegisterInfo.h"
      31             : #include "llvm/CodeGen/TargetFrameLowering.h"
      32             : #include "llvm/CodeGen/TargetInstrInfo.h"
      33             : #include "llvm/CodeGen/TargetRegisterInfo.h"
      34             : #include "llvm/CodeGen/TargetSubtargetInfo.h"
      35             : #include "llvm/MC/MCRegisterInfo.h"
      36             : #include "llvm/Pass.h"
      37             : #include "llvm/Support/Debug.h"
      38             : #include "llvm/Support/ErrorHandling.h"
      39             : #include "llvm/Support/raw_ostream.h"
      40             : #include <algorithm>
      41             : #include <cassert>
      42             : #include <iterator>
      43             : #include <limits>
      44             : #include <string>
      45             : #include <utility>
      46             : 
      47             : using namespace llvm;
      48             : 
      49             : #define DEBUG_TYPE "reg-scavenging"
      50             : 
      51             : STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");
      52             : 
      53        3084 : void RegScavenger::setRegUsed(unsigned Reg, LaneBitmask LaneMask) {
      54        3084 :   LiveUnits.addRegMasked(Reg, LaneMask);
      55        3084 : }
      56             : 
      57        2089 : void RegScavenger::init(MachineBasicBlock &MBB) {
      58        2089 :   MachineFunction &MF = *MBB.getParent();
      59        2089 :   TII = MF.getSubtarget().getInstrInfo();
      60        2089 :   TRI = MF.getSubtarget().getRegisterInfo();
      61        2089 :   MRI = &MF.getRegInfo();
      62        2089 :   LiveUnits.init(*TRI);
      63             : 
      64             :   assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
      65             :          "Target changed?");
      66             : 
      67             :   // Self-initialize.
      68        2089 :   if (!this->MBB) {
      69         919 :     NumRegUnits = TRI->getNumRegUnits();
      70         919 :     KillRegUnits.resize(NumRegUnits);
      71         919 :     DefRegUnits.resize(NumRegUnits);
      72         919 :     TmpRegUnits.resize(NumRegUnits);
      73             :   }
      74        2089 :   this->MBB = &MBB;
      75             : 
      76        3487 :   for (ScavengedInfo &SI : Scavenged) {
      77        1398 :     SI.Reg = 0;
      78        1398 :     SI.Restore = nullptr;
      79             :   }
      80             : 
      81        2089 :   Tracking = false;
      82        2089 : }
      83             : 
      84         545 : void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
      85         545 :   init(MBB);
      86         545 :   LiveUnits.addLiveIns(MBB);
      87         545 : }
      88             : 
      89        1544 : void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
      90        1544 :   init(MBB);
      91        1544 :   LiveUnits.addLiveOuts(MBB);
      92             : 
      93             :   // Move internal iterator at the last instruction of the block.
      94        1544 :   if (MBB.begin() != MBB.end()) {
      95        1544 :     MBBI = std::prev(MBB.end());
      96        1544 :     Tracking = true;
      97             :   }
      98        1544 : }
      99             : 
     100        6361 : void RegScavenger::addRegUnits(BitVector &BV, unsigned Reg) {
     101       22023 :   for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
     102             :     BV.set(*RUI);
     103        6361 : }
     104             : 
     105           0 : void RegScavenger::removeRegUnits(BitVector &BV, unsigned Reg) {
     106           0 :   for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
     107             :     BV.reset(*RUI);
     108           0 : }
     109             : 
     110        5383 : void RegScavenger::determineKillsAndDefs() {
     111             :   assert(Tracking && "Must be tracking to determine kills and defs");
     112             : 
     113             :   MachineInstr &MI = *MBBI;
     114             :   assert(!MI.isDebugInstr() && "Debug values have no kills or defs");
     115             : 
     116             :   // Find out which registers are early clobbered, killed, defined, and marked
     117             :   // def-dead in this instruction.
     118             :   KillRegUnits.reset();
     119             :   DefRegUnits.reset();
     120       25419 :   for (const MachineOperand &MO : MI.operands()) {
     121       20036 :     if (MO.isRegMask()) {
     122             :       TmpRegUnits.clear();
     123        6290 :       for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
     124       14490 :         for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
     125        6243 :           if (MO.clobbersPhysReg(*RURI)) {
     126             :             TmpRegUnits.set(RU);
     127             :             break;
     128             :           }
     129             :         }
     130             :       }
     131             : 
     132             :       // Apply the mask.
     133          47 :       KillRegUnits |= TmpRegUnits;
     134             :     }
     135       20036 :     if (!MO.isReg())
     136             :       continue;
     137       13872 :     unsigned Reg = MO.getReg();
     138       13872 :     if (!TargetRegisterInfo::isPhysicalRegister(Reg) || isReserved(Reg))
     139             :       continue;
     140             : 
     141        8759 :     if (MO.isUse()) {
     142             :       // Ignore undef uses.
     143        4738 :       if (MO.isUndef())
     144             :         continue;
     145        4582 :       if (MO.isKill())
     146        2340 :         addRegUnits(KillRegUnits, Reg);
     147             :     } else {
     148             :       assert(MO.isDef());
     149        4021 :       if (MO.isDead())
     150         481 :         addRegUnits(KillRegUnits, Reg);
     151             :       else
     152        3540 :         addRegUnits(DefRegUnits, Reg);
     153             :     }
     154             :   }
     155        5383 : }
     156             : 
     157           0 : void RegScavenger::unprocess() {
     158             :   assert(Tracking && "Cannot unprocess because we're not tracking");
     159             : 
     160             :   MachineInstr &MI = *MBBI;
     161             :   if (!MI.isDebugInstr()) {
     162           0 :     determineKillsAndDefs();
     163             : 
     164             :     // Commit the changes.
     165           0 :     setUnused(DefRegUnits);
     166           0 :     setUsed(KillRegUnits);
     167             :   }
     168             : 
     169           0 :   if (MBBI == MBB->begin()) {
     170           0 :     MBBI = MachineBasicBlock::iterator(nullptr);
     171           0 :     Tracking = false;
     172             :   } else
     173             :     --MBBI;
     174           0 : }
     175             : 
     176        5383 : void RegScavenger::forward() {
     177             :   // Move ptr forward.
     178        5383 :   if (!Tracking) {
     179         431 :     MBBI = MBB->begin();
     180         431 :     Tracking = true;
     181             :   } else {
     182             :     assert(MBBI != MBB->end() && "Already past the end of the basic block!");
     183        4952 :     MBBI = std::next(MBBI);
     184             :   }
     185             :   assert(MBBI != MBB->end() && "Already at the end of the basic block!");
     186             : 
     187             :   MachineInstr &MI = *MBBI;
     188             : 
     189        1621 :   for (SmallVectorImpl<ScavengedInfo>::iterator I = Scavenged.begin(),
     190        7004 :          IE = Scavenged.end(); I != IE; ++I) {
     191        1621 :     if (I->Restore != &MI)
     192             :       continue;
     193             : 
     194           3 :     I->Reg = 0;
     195           3 :     I->Restore = nullptr;
     196             :   }
     197             : 
     198             :   if (MI.isDebugInstr())
     199             :     return;
     200             : 
     201        5383 :   determineKillsAndDefs();
     202             : 
     203             :   // Verify uses and defs.
     204             : #ifndef NDEBUG
     205             :   for (const MachineOperand &MO : MI.operands()) {
     206             :     if (!MO.isReg())
     207             :       continue;
     208             :     unsigned Reg = MO.getReg();
     209             :     if (!TargetRegisterInfo::isPhysicalRegister(Reg) || isReserved(Reg))
     210             :       continue;
     211             :     if (MO.isUse()) {
     212             :       if (MO.isUndef())
     213             :         continue;
     214             :       if (!isRegUsed(Reg)) {
     215             :         // Check if it's partial live: e.g.
     216             :         // D0 = insert_subreg undef D0, S0
     217             :         // ... D0
     218             :         // The problem is the insert_subreg could be eliminated. The use of
     219             :         // D0 is using a partially undef value. This is not *incorrect* since
     220             :         // S1 is can be freely clobbered.
     221             :         // Ideally we would like a way to model this, but leaving the
     222             :         // insert_subreg around causes both correctness and performance issues.
     223             :         bool SubUsed = false;
     224             :         for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
     225             :           if (isRegUsed(*SubRegs)) {
     226             :             SubUsed = true;
     227             :             break;
     228             :           }
     229             :         bool SuperUsed = false;
     230             :         for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
     231             :           if (isRegUsed(*SR)) {
     232             :             SuperUsed = true;
     233             :             break;
     234             :           }
     235             :         }
     236             :         if (!SubUsed && !SuperUsed) {
     237             :           MBB->getParent()->verify(nullptr, "In Register Scavenger");
     238             :           llvm_unreachable("Using an undefined register!");
     239             :         }
     240             :         (void)SubUsed;
     241             :         (void)SuperUsed;
     242             :       }
     243             :     } else {
     244             :       assert(MO.isDef());
     245             : #if 0
     246             :       // FIXME: Enable this once we've figured out how to correctly transfer
     247             :       // implicit kills during codegen passes like the coalescer.
     248             :       assert((KillRegs.test(Reg) || isUnused(Reg) ||
     249             :               isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
     250             :              "Re-defining a live register!");
     251             : #endif
     252             :     }
     253             :   }
     254             : #endif // NDEBUG
     255             : 
     256             :   // Commit the changes.
     257        5383 :   setUnused(KillRegUnits);
     258        5383 :   setUsed(DefRegUnits);
     259             : }
     260             : 
     261       39527 : void RegScavenger::backward() {
     262             :   assert(Tracking && "Must be tracking to determine kills and defs");
     263             : 
     264             :   const MachineInstr &MI = *MBBI;
     265       39527 :   LiveUnits.stepBackward(MI);
     266             : 
     267             :   // Expire scavenge spill frameindex uses.
     268       65122 :   for (ScavengedInfo &I : Scavenged) {
     269       25595 :     if (I.Restore == &MI) {
     270          23 :       I.Reg = 0;
     271          23 :       I.Restore = nullptr;
     272             :     }
     273             :   }
     274             : 
     275       79054 :   if (MBBI == MBB->begin()) {
     276           0 :     MBBI = MachineBasicBlock::iterator(nullptr);
     277           0 :     Tracking = false;
     278             :   } else
     279             :     --MBBI;
     280       39527 : }
     281             : 
     282        3309 : bool RegScavenger::isRegUsed(unsigned Reg, bool includeReserved) const {
     283        6618 :   if (isReserved(Reg))
     284             :     return includeReserved;
     285        2650 :   return !LiveUnits.available(Reg);
     286             : }
     287             : 
     288           0 : unsigned RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
     289           0 :   for (unsigned Reg : *RC) {
     290           0 :     if (!isRegUsed(Reg)) {
     291             :       LLVM_DEBUG(dbgs() << "Scavenger found unused reg: " << printReg(Reg, TRI)
     292             :                         << "\n");
     293           0 :       return Reg;
     294             :     }
     295             :   }
     296             :   return 0;
     297             : }
     298             : 
     299          75 : BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
     300          75 :   BitVector Mask(TRI->getNumRegs());
     301        2961 :   for (unsigned Reg : *RC)
     302        2886 :     if (!isRegUsed(Reg))
     303             :       Mask.set(Reg);
     304          75 :   return Mask;
     305             : }
     306             : 
     307          68 : unsigned RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
     308             :                                        BitVector &Candidates,
     309             :                                        unsigned InstrLimit,
     310             :                                        MachineBasicBlock::iterator &UseMI) {
     311             :   int Survivor = Candidates.find_first();
     312             :   assert(Survivor > 0 && "No candidates for scavenging");
     313             : 
     314          68 :   MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
     315             :   assert(StartMI != ME && "MI already at terminator");
     316          68 :   MachineBasicBlock::iterator RestorePointMI = StartMI;
     317             :   MachineBasicBlock::iterator MI = StartMI;
     318             : 
     319             :   bool inVirtLiveRange = false;
     320         338 :   for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
     321             :     if (MI->isDebugInstr()) {
     322           0 :       ++InstrLimit; // Don't count debug instructions
     323           0 :       continue;
     324             :     }
     325             :     bool isVirtKillInsn = false;
     326             :     bool isVirtDefInsn = false;
     327             :     // Remove any candidates touched by instruction.
     328        1201 :     for (const MachineOperand &MO : MI->operands()) {
     329         915 :       if (MO.isRegMask())
     330           0 :         Candidates.clearBitsNotInMask(MO.getRegMask());
     331         915 :       if (!MO.isReg() || MO.isUndef() || !MO.getReg())
     332             :         continue;
     333         586 :       if (TargetRegisterInfo::isVirtualRegister(MO.getReg())) {
     334         136 :         if (MO.isDef())
     335             :           isVirtDefInsn = true;
     336          68 :         else if (MO.isKill())
     337             :           isVirtKillInsn = true;
     338         136 :         continue;
     339             :       }
     340         900 :       for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
     341             :         Candidates.reset(*AI);
     342             :     }
     343             :     // If we're not in a virtual reg's live range, this is a valid
     344             :     // restore point.
     345         286 :     if (!inVirtLiveRange) RestorePointMI = MI;
     346             : 
     347             :     // Update whether we're in the live range of a virtual register
     348         286 :     if (isVirtKillInsn) inVirtLiveRange = false;
     349         286 :     if (isVirtDefInsn) inVirtLiveRange = true;
     350             : 
     351             :     // Was our survivor untouched by this instruction?
     352         572 :     if (Candidates.test(Survivor))
     353             :       continue;
     354             : 
     355             :     // All candidates gone?
     356          60 :     if (Candidates.none())
     357             :       break;
     358             : 
     359             :     Survivor = Candidates.find_first();
     360             :   }
     361             :   // If we ran off the end, that's where we want to restore.
     362          68 :   if (MI == ME) RestorePointMI = ME;
     363             :   assert(RestorePointMI != StartMI &&
     364             :          "No available scavenger restore location!");
     365             : 
     366             :   // We ran out of candidates, so stop the search.
     367          68 :   UseMI = RestorePointMI;
     368          68 :   return Survivor;
     369             : }
     370             : 
     371             : /// Given the bitvector \p Available of free register units at position
     372             : /// \p From. Search backwards to find a register that is part of \p
     373             : /// Candidates and not used/clobbered until the point \p To. If there is
     374             : /// multiple candidates continue searching and pick the one that is not used/
     375             : /// clobbered for the longest time.
     376             : /// Returns the register and the earliest position we know it to be free or
     377             : /// the position MBB.end() if no register is available.
     378             : static std::pair<MCPhysReg, MachineBasicBlock::iterator>
     379        3045 : findSurvivorBackwards(const MachineRegisterInfo &MRI,
     380             :     MachineBasicBlock::iterator From, MachineBasicBlock::iterator To,
     381             :     const LiveRegUnits &LiveOut, ArrayRef<MCPhysReg> AllocationOrder,
     382             :     bool RestoreAfter) {
     383             :   bool FoundTo = false;
     384             :   MCPhysReg Survivor = 0;
     385             :   MachineBasicBlock::iterator Pos;
     386        3045 :   MachineBasicBlock &MBB = *From->getParent();
     387             :   unsigned InstrLimit = 25;
     388             :   unsigned InstrCountDown = InstrLimit;
     389        3045 :   const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
     390             :   LiveRegUnits Used(TRI);
     391             : 
     392        3045 :   for (MachineBasicBlock::iterator I = From;; --I) {
     393             :     const MachineInstr &MI = *I;
     394             : 
     395        3834 :     Used.accumulate(MI);
     396             : 
     397        3834 :     if (I == To) {
     398             :       // See if one of the registers in RC wasn't used so far.
     399        6140 :       for (MCPhysReg Reg : AllocationOrder) {
     400       17503 :         if (!MRI.isReserved(Reg) && Used.available(Reg) &&
     401        5273 :             LiveOut.available(Reg))
     402             :           return std::make_pair(Reg, MBB.end());
     403             :       }
     404             :       // Otherwise we will continue up to InstrLimit instructions to find
     405             :       // the register which is not defined/used for the longest time.
     406             :       FoundTo = true;
     407             :       Pos = To;
     408             :       // Note: It was fine so far to start our search at From, however now that
     409             :       // we have to spill, and can only place the restore after From then
     410             :       // add the regs used/defed by std::next(From) to the set.
     411          25 :       if (RestoreAfter)
     412          25 :         Used.accumulate(*std::next(From));
     413             :     }
     414         789 :     if (FoundTo) {
     415         419 :       if (Survivor == 0 || !Used.available(Survivor)) {
     416             :         MCPhysReg AvilableReg = 0;
     417         303 :         for (MCPhysReg Reg : AllocationOrder) {
     418         588 :           if (!MRI.isReserved(Reg) && Used.available(Reg)) {
     419             :             AvilableReg = Reg;
     420             :             break;
     421             :           }
     422             :         }
     423          51 :         if (AvilableReg == 0)
     424             :           break;
     425             :         Survivor = AvilableReg;
     426             :       }
     427         410 :       if (--InstrCountDown == 0)
     428             :         break;
     429             : 
     430             :       // Keep searching when we find a vreg since the spilled register will
     431             :       // be usefull for this other vreg as well later.
     432             :       bool FoundVReg = false;
     433        1397 :       for (const MachineOperand &MO : MI.operands()) {
     434        1042 :         if (MO.isReg() && TargetRegisterInfo::isVirtualRegister(MO.getReg())) {
     435             :           FoundVReg = true;
     436             :           break;
     437             :         }
     438             :       }
     439         404 :       if (FoundVReg) {
     440             :         InstrCountDown = InstrLimit;
     441          49 :         Pos = I;
     442             :       }
     443         404 :       if (I == MBB.begin())
     444             :         break;
     445             :     }
     446             :   }
     447             : 
     448             :   return std::make_pair(Survivor, Pos);
     449             : }
     450             : 
     451             : static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
     452             :   unsigned i = 0;
     453         216 :   while (!MI.getOperand(i).isFI()) {
     454          56 :     ++i;
     455             :     assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
     456             :   }
     457             :   return i;
     458             : }
     459             : 
     460             : RegScavenger::ScavengedInfo &
     461          29 : RegScavenger::spill(unsigned Reg, const TargetRegisterClass &RC, int SPAdj,
     462             :                     MachineBasicBlock::iterator Before,
     463             :                     MachineBasicBlock::iterator &UseMI) {
     464             :   // Find an available scavenging slot with size and alignment matching
     465             :   // the requirements of the class RC.
     466             :   const MachineFunction &MF = *Before->getMF();
     467          29 :   const MachineFrameInfo &MFI = MF.getFrameInfo();
     468          29 :   unsigned NeedSize = TRI->getSpillSize(RC);
     469             :   unsigned NeedAlign = TRI->getSpillAlignment(RC);
     470             : 
     471          29 :   unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max();
     472          29 :   int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
     473          72 :   for (unsigned I = 0; I < Scavenged.size(); ++I) {
     474          43 :     if (Scavenged[I].Reg != 0)
     475             :       continue;
     476             :     // Verify that this slot is valid for this register.
     477          42 :     int FI = Scavenged[I].FrameIndex;
     478          42 :     if (FI < FIB || FI >= FIE)
     479             :       continue;
     480          41 :     unsigned S = MFI.getObjectSize(FI);
     481             :     unsigned A = MFI.getObjectAlignment(FI);
     482          41 :     if (NeedSize > S || NeedAlign > A)
     483             :       continue;
     484             :     // Avoid wasting slots with large size and/or large alignment. Pick one
     485             :     // that is the best fit for this register class (in street metric).
     486             :     // Picking a larger slot than necessary could happen if a slot for a
     487             :     // larger register is reserved before a slot for a smaller one. When
     488             :     // trying to spill a smaller register, the large slot would be found
     489             :     // first, thus making it impossible to spill the larger register later.
     490          41 :     unsigned D = (S-NeedSize) + (A-NeedAlign);
     491          41 :     if (D < Diff) {
     492             :       SI = I;
     493             :       Diff = D;
     494             :     }
     495             :   }
     496             : 
     497          29 :   if (SI == Scavenged.size()) {
     498             :     // We need to scavenge a register but have no spill slot, the target
     499             :     // must know how to do it (if not, we'll assert below).
     500           6 :     Scavenged.push_back(ScavengedInfo(FIE));
     501             :   }
     502             : 
     503             :   // Avoid infinite regress
     504          29 :   Scavenged[SI].Reg = Reg;
     505             : 
     506             :   // If the target knows how to save/restore the register, let it do so;
     507             :   // otherwise, use the emergency stack spill slot.
     508          29 :   if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) {
     509             :     // Spill the scavenged register before \p Before.
     510          27 :     int FI = Scavenged[SI].FrameIndex;
     511          27 :     if (FI < FIB || FI >= FIE) {
     512           2 :       std::string Msg = std::string("Error while trying to spill ") +
     513           3 :           TRI->getName(Reg) + " from class " + TRI->getRegClassName(&RC) +
     514           1 :           ": Cannot scavenge register without an emergency spill slot!";
     515           1 :       report_fatal_error(Msg.c_str());
     516             :     }
     517          52 :     TII->storeRegToStackSlot(*MBB, Before, Reg, true, Scavenged[SI].FrameIndex,
     518          26 :                              &RC, TRI);
     519          26 :     MachineBasicBlock::iterator II = std::prev(Before);
     520             : 
     521             :     unsigned FIOperandNum = getFrameIndexOperandNum(*II);
     522          26 :     TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
     523             : 
     524             :     // Restore the scavenged register before its use (or first terminator).
     525          26 :     TII->loadRegFromStackSlot(*MBB, UseMI, Reg, Scavenged[SI].FrameIndex,
     526          26 :                               &RC, TRI);
     527          26 :     II = std::prev(UseMI);
     528             : 
     529             :     FIOperandNum = getFrameIndexOperandNum(*II);
     530          26 :     TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
     531             :   }
     532          28 :   return Scavenged[SI];
     533             : }
     534             : 
     535          68 : unsigned RegScavenger::scavengeRegister(const TargetRegisterClass *RC,
     536             :                                         MachineBasicBlock::iterator I,
     537             :                                         int SPAdj) {
     538             :   MachineInstr &MI = *I;
     539             :   const MachineFunction &MF = *MI.getMF();
     540             :   // Consider all allocatable registers in the register class initially
     541          68 :   BitVector Candidates = TRI->getAllocatableSet(MF, RC);
     542             : 
     543             :   // Exclude all the registers being used by the instruction.
     544         204 :   for (const MachineOperand &MO : MI.operands()) {
     545         136 :     if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&
     546             :         !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
     547          68 :       for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
     548             :         Candidates.reset(*AI);
     549             :   }
     550             : 
     551             :   // Try to find a register that's unused if there is one, as then we won't
     552             :   // have to spill.
     553          68 :   BitVector Available = getRegsAvailable(RC);
     554          68 :   Available &= Candidates;
     555          68 :   if (Available.any())
     556          64 :     Candidates = Available;
     557             : 
     558             :   // Find the register whose use is furthest away.
     559             :   MachineBasicBlock::iterator UseMI;
     560          68 :   unsigned SReg = findSurvivorReg(I, Candidates, 25, UseMI);
     561             : 
     562             :   // If we found an unused register there is no reason to spill it.
     563          68 :   if (!isRegUsed(SReg)) {
     564             :     LLVM_DEBUG(dbgs() << "Scavenged register: " << printReg(SReg, TRI) << "\n");
     565             :     return SReg;
     566             :   }
     567             : 
     568           4 :   ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI);
     569           3 :   Scavenged.Restore = &*std::prev(UseMI);
     570             : 
     571             :   LLVM_DEBUG(dbgs() << "Scavenged register (with spill): "
     572             :                     << printReg(SReg, TRI) << "\n");
     573             : 
     574           3 :   return SReg;
     575             : }
     576             : 
     577        3045 : unsigned RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC,
     578             :                                                  MachineBasicBlock::iterator To,
     579             :                                                  bool RestoreAfter, int SPAdj) {
     580        3045 :   const MachineBasicBlock &MBB = *To->getParent();
     581        3045 :   const MachineFunction &MF = *MBB.getParent();
     582             : 
     583             :   // Find the register whose use is furthest away.
     584             :   MachineBasicBlock::iterator UseMI;
     585        3045 :   ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF);
     586             :   std::pair<MCPhysReg, MachineBasicBlock::iterator> P =
     587        3045 :       findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder,
     588        3045 :                             RestoreAfter);
     589        3045 :   MCPhysReg Reg = P.first;
     590        3045 :   MachineBasicBlock::iterator SpillBefore = P.second;
     591             :   assert(Reg != 0 && "No register left to scavenge!");
     592             :   // Found an available register?
     593        3045 :   if (SpillBefore != MBB.end()) {
     594             :     MachineBasicBlock::iterator ReloadAfter =
     595          50 :       RestoreAfter ? std::next(MBBI) : MBBI;
     596          25 :     MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter);
     597             :     if (ReloadBefore != MBB.end())
     598             :       LLVM_DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
     599          25 :     ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore);
     600          25 :     Scavenged.Restore = &*std::prev(SpillBefore);
     601          25 :     LiveUnits.removeReg(Reg);
     602             :     LLVM_DEBUG(dbgs() << "Scavenged register with spill: " << printReg(Reg, TRI)
     603             :                       << " until " << *SpillBefore);
     604             :   } else {
     605             :     LLVM_DEBUG(dbgs() << "Scavenged free register: " << printReg(Reg, TRI)
     606             :                       << '\n');
     607             :   }
     608        3045 :   return Reg;
     609             : }
     610             : 
     611             : /// Allocate a register for the virtual register \p VReg. The last use of
     612             : /// \p VReg is around the current position of the register scavenger \p RS.
     613             : /// \p ReserveAfter controls whether the scavenged register needs to be reserved
     614             : /// after the current instruction, otherwise it will only be reserved before the
     615             : /// current instruction.
     616        3045 : static unsigned scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS,
     617             :                              unsigned VReg, bool ReserveAfter) {
     618        3045 :   const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
     619             : #ifndef NDEBUG
     620             :   // Verify that all definitions and uses are in the same basic block.
     621             :   const MachineBasicBlock *CommonMBB = nullptr;
     622             :   // Real definition for the reg, re-definitions are not considered.
     623             :   const MachineInstr *RealDef = nullptr;
     624             :   for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) {
     625             :     MachineBasicBlock *MBB = MO.getParent()->getParent();
     626             :     if (CommonMBB == nullptr)
     627             :       CommonMBB = MBB;
     628             :     assert(MBB == CommonMBB && "All defs+uses must be in the same basic block");
     629             :     if (MO.isDef()) {
     630             :       const MachineInstr &MI = *MO.getParent();
     631             :       if (!MI.readsRegister(VReg, &TRI)) {
     632             :         assert((!RealDef || RealDef == &MI) &&
     633             :                "Can have at most one definition which is not a redefinition");
     634             :         RealDef = &MI;
     635             :       }
     636             :     }
     637             :   }
     638             :   assert(RealDef != nullptr && "Must have at least 1 Def");
     639             : #endif
     640             : 
     641             :   // We should only have one definition of the register. However to accommodate
     642             :   // the requirements of two address code we also allow definitions in
     643             :   // subsequent instructions provided they also read the register. That way
     644             :   // we get a single contiguous lifetime.
     645             :   //
     646             :   // Definitions in MRI.def_begin() are unordered, search for the first.
     647             :   MachineRegisterInfo::def_iterator FirstDef =
     648             :     std::find_if(MRI.def_begin(VReg), MRI.def_end(),
     649             :                  [VReg, &TRI](const MachineOperand &MO) {
     650           0 :       return !MO.getParent()->readsRegister(VReg, &TRI);
     651        3045 :     });
     652             :   assert(FirstDef != MRI.def_end() &&
     653             :          "Must have one definition that does not redefine vreg");
     654        3045 :   MachineInstr &DefMI = *FirstDef->getParent();
     655             : 
     656             :   // The register scavenger will report a free register inserting an emergency
     657             :   // spill/reload if necessary.
     658             :   int SPAdj = 0;
     659             :   const TargetRegisterClass &RC = *MRI.getRegClass(VReg);
     660        6090 :   unsigned SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(),
     661             :                                                ReserveAfter, SPAdj);
     662        3045 :   MRI.replaceRegWith(VReg, SReg);
     663             :   ++NumScavengedRegs;
     664        3045 :   return SReg;
     665             : }
     666             : 
     667             : /// Allocate (scavenge) vregs inside a single basic block.
     668             : /// Returns true if the target spill callback created new vregs and a 2nd pass
     669             : /// is necessary.
     670        1510 : static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI,
     671             :                                             RegScavenger &RS,
     672             :                                             MachineBasicBlock &MBB) {
     673        1510 :   const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
     674        1510 :   RS.enterBasicBlockEnd(MBB);
     675             : 
     676             :   unsigned InitialNumVirtRegs = MRI.getNumVirtRegs();
     677             :   bool NextInstructionReadsVReg = false;
     678       42547 :   for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) {
     679             :     --I;
     680             :     // Move RegScavenger to the position between *I and *std::next(I).
     681             :     RS.backward(I);
     682             : 
     683             :     // Look for unassigned vregs in the uses of *std::next(I).
     684       41037 :     if (NextInstructionReadsVReg) {
     685        2999 :       MachineBasicBlock::iterator N = std::next(I);
     686             :       const MachineInstr &NMI = *N;
     687       16674 :       for (const MachineOperand &MO : NMI.operands()) {
     688       13675 :         if (!MO.isReg())
     689             :           continue;
     690        9588 :         unsigned Reg = MO.getReg();
     691             :         // We only care about virtual registers and ignore virtual registers
     692             :         // created by the target callbacks in the process (those will be handled
     693             :         // in a scavenging round).
     694        9588 :         if (!TargetRegisterInfo::isVirtualRegister(Reg) ||
     695             :             TargetRegisterInfo::virtReg2Index(Reg) >= InitialNumVirtRegs)
     696             :           continue;
     697             :         if (!MO.readsReg())
     698             :           continue;
     699             : 
     700        3017 :         unsigned SReg = scavengeVReg(MRI, RS, Reg, true);
     701        3017 :         N->addRegisterKilled(SReg, &TRI, false);
     702        3017 :         RS.setRegUsed(SReg);
     703             :       }
     704             :     }
     705             : 
     706             :     // Look for unassigned vregs in the defs of *I.
     707             :     NextInstructionReadsVReg = false;
     708             :     const MachineInstr &MI = *I;
     709      197940 :     for (const MachineOperand &MO : MI.operands()) {
     710      156903 :       if (!MO.isReg())
     711             :         continue;
     712      102334 :       unsigned Reg = MO.getReg();
     713             :       // Only vregs, no newly created vregs (see above).
     714      102334 :       if (!TargetRegisterInfo::isVirtualRegister(Reg) ||
     715             :           TargetRegisterInfo::virtReg2Index(Reg) >= InitialNumVirtRegs)
     716             :         continue;
     717             :       // We have to look at all operands anyway so we can precalculate here
     718             :       // whether there is a reading operand. This allows use to skip the use
     719             :       // step in the next iteration if there was none.
     720             :       assert(!MO.isInternalRead() && "Cannot assign inside bundles");
     721             :       assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
     722             :       if (MO.readsReg()) {
     723             :         NextInstructionReadsVReg = true;
     724             :       }
     725        3045 :       if (MO.isDef()) {
     726          28 :         unsigned SReg = scavengeVReg(MRI, RS, Reg, false);
     727          28 :         I->addRegisterDead(SReg, &TRI, false);
     728             :       }
     729             :     }
     730             :   }
     731             : #ifndef NDEBUG
     732             :   for (const MachineOperand &MO : MBB.front().operands()) {
     733             :     if (!MO.isReg() || !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
     734             :       continue;
     735             :     assert(!MO.isInternalRead() && "Cannot assign inside bundles");
     736             :     assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
     737             :     assert(!MO.readsReg() && "Vreg use in first instruction not allowed");
     738             :   }
     739             : #endif
     740             : 
     741        1510 :   return MRI.getNumVirtRegs() != InitialNumVirtRegs;
     742             : }
     743             : 
     744       65574 : void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) {
     745             :   // FIXME: Iterating over the instruction stream is unnecessary. We can simply
     746             :   // iterate over the vreg use list, which at this point only contains machine
     747             :   // operands for which eliminateFrameIndex need a new scratch reg.
     748       65574 :   MachineRegisterInfo &MRI = MF.getRegInfo();
     749             :   // Shortcut.
     750       65574 :   if (MRI.getNumVirtRegs() == 0) {
     751             :     MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
     752       65052 :     return;
     753             :   }
     754             : 
     755             :   // Run through the instructions and find any virtual registers.
     756        2100 :   for (MachineBasicBlock &MBB : MF) {
     757        1578 :     if (MBB.empty())
     758             :       continue;
     759             : 
     760        1510 :     bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
     761        1510 :     if (Again) {
     762             :       LLVM_DEBUG(dbgs() << "Warning: Required two scavenging passes for block "
     763             :                         << MBB.getName() << '\n');
     764           0 :       Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
     765             :       // The target required a 2nd run (because it created new vregs while
     766             :       // spilling). Refuse to do another pass to keep compiletime in check.
     767           0 :       if (Again)
     768           0 :         report_fatal_error("Incomplete scavenging after 2nd pass");
     769             :     }
     770             :   }
     771             : 
     772         522 :   MRI.clearVirtRegs();
     773             :   MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
     774             : }
     775             : 
     776             : namespace {
     777             : 
     778             : /// This class runs register scavenging independ of the PrologEpilogInserter.
     779             : /// This is used in for testing.
     780             : class ScavengerTest : public MachineFunctionPass {
     781             : public:
     782             :   static char ID;
     783             : 
     784           3 :   ScavengerTest() : MachineFunctionPass(ID) {}
     785             : 
     786           7 :   bool runOnMachineFunction(MachineFunction &MF) override {
     787           7 :     const TargetSubtargetInfo &STI = MF.getSubtarget();
     788           7 :     const TargetFrameLowering &TFL = *STI.getFrameLowering();
     789             : 
     790           7 :     RegScavenger RS;
     791             :     // Let's hope that calling those outside of PrologEpilogueInserter works
     792             :     // well enough to initialize the scavenger with some emergency spillslots
     793             :     // for the target.
     794             :     BitVector SavedRegs;
     795           7 :     TFL.determineCalleeSaves(MF, SavedRegs, &RS);
     796           7 :     TFL.processFunctionBeforeFrameFinalized(MF, &RS);
     797             : 
     798             :     // Let's scavenge the current function
     799           7 :     scavengeFrameVirtualRegs(MF, RS);
     800           7 :     return true;
     801             :   }
     802             : };
     803             : 
     804             : } // end anonymous namespace
     805             : 
     806             : char ScavengerTest::ID;
     807             : 
     808       42604 : INITIALIZE_PASS(ScavengerTest, "scavenger-test",
     809             :                 "Scavenge virtual registers inside basic blocks", false, false)

Generated by: LCOV version 1.13