Bug Summary

File: lib/CodeGen/MachineVerifier.cpp
Warning: line 1923, column 32
Access to field 'id' results in a dereference of a null pointer (loaded from variable 'PVNI')
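The flagged line (1923) lies past the end of the excerpt below, but the reported pattern is the usual one: a VNInfo pointer obtained from a live-range lookup that can legitimately return null is dereferenced through its 'id' field on a path where the null case is not guarded. The following is a minimal illustrative sketch of that pattern and of the guard that avoids it, assuming the pointer comes from a call such as LiveRange::getVNInfoBefore(), which returns nullptr when no value is live before the given index; the function and variable names here are hypothetical and are not the exact code at line 1923.

    #include "llvm/CodeGen/LiveInterval.h"
    #include "llvm/CodeGen/SlotIndexes.h"
    using namespace llvm;

    // Hypothetical sketch of the reported pattern: PVNI may be null, so it
    // must be tested before PVNI->id is read.
    static void checkValueAcrossBoundary(const LiveRange &LR, SlotIndex PEnd,
                                         const VNInfo *VNI) {
      const VNInfo *PVNI = LR.getVNInfoBefore(PEnd); // may return nullptr
      if (!PVNI) {
        // Report the missing predecessor value and return instead of
        // dereferencing a null pointer below.
        return;
      }
      if (PVNI->id != VNI->id) {
        // Value numbers disagree across the boundary; report it.
      }
    }

The analyzer's complaint is that some path reaches the PVNI->id access without passing through such a null check.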

Annotated Source Code

1//===-- MachineVerifier.cpp - Machine Code Verifier -----------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// Pass to verify generated machine code. The following is checked:
11//
12// Operand counts: All explicit operands must be present.
13//
14// Register classes: All physical and virtual register operands must be
15// compatible with the register class required by the instruction descriptor.
16//
17// Register live intervals: Registers must be defined only once, and must be
18// defined before use.
19//
20// The machine code verifier is enabled from LLVMTargetMachine.cpp with the
21// command-line option -verify-machineinstrs, or by defining the environment
22// variable LLVM_VERIFY_MACHINEINSTRS to the name of a file that will receive
23// the verifier errors.
24//===----------------------------------------------------------------------===//
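Besides the -verify-machineinstrs option and the LLVM_VERIFY_MACHINEINSTRS environment variable described in the comment above, the verifier can also be run in-process through MachineFunction::verify(), whose definition appears further down in this file. A minimal sketch, assuming a MachineFunction reference MF is in scope and using a hypothetical banner string:

    // Run the verifier after a transformation; the banner is prepended to any
    // error output, and AbortOnErrors controls whether a fatal error is raised.
    bool StillValid = MF.verify(/*p=*/nullptr, /*Banner=*/"After MyPass",
                                /*AbortOnErrors=*/false);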
25
26#include "llvm/CodeGen/Passes.h"
27#include "llvm/ADT/DenseSet.h"
28#include "llvm/ADT/DepthFirstIterator.h"
29#include "llvm/ADT/SetOperations.h"
30#include "llvm/ADT/SmallVector.h"
31#include "llvm/Analysis/EHPersonalities.h"
32#include "llvm/CodeGen/LiveIntervalAnalysis.h"
33#include "llvm/CodeGen/LiveStackAnalysis.h"
34#include "llvm/CodeGen/LiveVariables.h"
35#include "llvm/CodeGen/MachineFrameInfo.h"
36#include "llvm/CodeGen/MachineFunctionPass.h"
37#include "llvm/CodeGen/MachineMemOperand.h"
38#include "llvm/CodeGen/MachineRegisterInfo.h"
39#include "llvm/IR/BasicBlock.h"
40#include "llvm/IR/InlineAsm.h"
41#include "llvm/IR/Instructions.h"
42#include "llvm/MC/MCAsmInfo.h"
43#include "llvm/Support/Debug.h"
44#include "llvm/Support/ErrorHandling.h"
45#include "llvm/Support/FileSystem.h"
46#include "llvm/Support/raw_ostream.h"
47#include "llvm/Target/TargetInstrInfo.h"
48#include "llvm/Target/TargetMachine.h"
49#include "llvm/Target/TargetRegisterInfo.h"
50#include "llvm/Target/TargetSubtargetInfo.h"
51using namespace llvm;
52
53namespace {
54 struct MachineVerifier {
55
56 MachineVerifier(Pass *pass, const char *b) :
57 PASS(pass),
58 Banner(b)
59 {}
60
61 unsigned verify(MachineFunction &MF);
62
63 Pass *const PASS;
64 const char *Banner;
65 const MachineFunction *MF;
66 const TargetMachine *TM;
67 const TargetInstrInfo *TII;
68 const TargetRegisterInfo *TRI;
69 const MachineRegisterInfo *MRI;
70
71 unsigned foundErrors;
72
73 // Avoid querying the MachineFunctionProperties for each operand.
74 bool isFunctionRegBankSelected;
75 bool isFunctionSelected;
76
77 typedef SmallVector<unsigned, 16> RegVector;
78 typedef SmallVector<const uint32_t*, 4> RegMaskVector;
79 typedef DenseSet<unsigned> RegSet;
80 typedef DenseMap<unsigned, const MachineInstr*> RegMap;
81 typedef SmallPtrSet<const MachineBasicBlock*, 8> BlockSet;
82
83 const MachineInstr *FirstTerminator;
84 BlockSet FunctionBlocks;
85
86 BitVector regsReserved;
87 RegSet regsLive;
88 RegVector regsDefined, regsDead, regsKilled;
89 RegMaskVector regMasks;
90 RegSet regsLiveInButUnused;
91
92 SlotIndex lastIndex;
93
94 // Add Reg and any sub-registers to RV
95 void addRegWithSubRegs(RegVector &RV, unsigned Reg) {
96 RV.push_back(Reg);
97 if (TargetRegisterInfo::isPhysicalRegister(Reg))
98 for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
99 RV.push_back(*SubRegs);
100 }
101
102 struct BBInfo {
103 // Is this MBB reachable from the MF entry point?
104 bool reachable;
105
106 // Vregs that must be live in because they are used without being
107 // defined. Map value is the user.
108 RegMap vregsLiveIn;
109
110 // Regs killed in MBB. They may be defined again, and will then be in both
111 // regsKilled and regsLiveOut.
112 RegSet regsKilled;
113
114 // Regs defined in MBB and live out. Note that vregs passing through may
115 // be live out without being mentioned here.
116 RegSet regsLiveOut;
117
118 // Vregs that pass through MBB untouched. This set is disjoint from
119 // regsKilled and regsLiveOut.
120 RegSet vregsPassed;
121
122 // Vregs that must pass through MBB because they are needed by a successor
123 // block. This set is disjoint from regsLiveOut.
124 RegSet vregsRequired;
125
126 // Set versions of block's predecessor and successor lists.
127 BlockSet Preds, Succs;
128
129 BBInfo() : reachable(false) {}
130
131 // Add register to vregsPassed if it belongs there. Return true if
132 // anything changed.
133 bool addPassed(unsigned Reg) {
134 if (!TargetRegisterInfo::isVirtualRegister(Reg))
135 return false;
136 if (regsKilled.count(Reg) || regsLiveOut.count(Reg))
137 return false;
138 return vregsPassed.insert(Reg).second;
139 }
140
141 // Same for a full set.
142 bool addPassed(const RegSet &RS) {
143 bool changed = false;
144 for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
145 if (addPassed(*I))
146 changed = true;
147 return changed;
148 }
149
150 // Add register to vregsRequired if it belongs there. Return true if
151 // anything changed.
152 bool addRequired(unsigned Reg) {
153 if (!TargetRegisterInfo::isVirtualRegister(Reg))
154 return false;
155 if (regsLiveOut.count(Reg))
156 return false;
157 return vregsRequired.insert(Reg).second;
158 }
159
160 // Same for a full set.
161 bool addRequired(const RegSet &RS) {
162 bool changed = false;
163 for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
164 if (addRequired(*I))
165 changed = true;
166 return changed;
167 }
168
169 // Same for a full map.
170 bool addRequired(const RegMap &RM) {
171 bool changed = false;
172 for (RegMap::const_iterator I = RM.begin(), E = RM.end(); I != E; ++I)
173 if (addRequired(I->first))
174 changed = true;
175 return changed;
176 }
177
178 // Live-out registers are either in regsLiveOut or vregsPassed.
179 bool isLiveOut(unsigned Reg) const {
180 return regsLiveOut.count(Reg) || vregsPassed.count(Reg);
181 }
182 };
183
184 // Extra register info per MBB.
185 DenseMap<const MachineBasicBlock*, BBInfo> MBBInfoMap;
186
187 bool isReserved(unsigned Reg) {
188 return Reg < regsReserved.size() && regsReserved.test(Reg);
189 }
190
191 bool isAllocatable(unsigned Reg) {
192 return Reg < TRI->getNumRegs() && MRI->isAllocatable(Reg);
193 }
194
195 // Analysis information if available
196 LiveVariables *LiveVars;
197 LiveIntervals *LiveInts;
198 LiveStacks *LiveStks;
199 SlotIndexes *Indexes;
200
201 void visitMachineFunctionBefore();
202 void visitMachineBasicBlockBefore(const MachineBasicBlock *MBB);
203 void visitMachineBundleBefore(const MachineInstr *MI);
204 void visitMachineInstrBefore(const MachineInstr *MI);
205 void visitMachineOperand(const MachineOperand *MO, unsigned MONum);
206 void visitMachineInstrAfter(const MachineInstr *MI);
207 void visitMachineBundleAfter(const MachineInstr *MI);
208 void visitMachineBasicBlockAfter(const MachineBasicBlock *MBB);
209 void visitMachineFunctionAfter();
210
211 void report(const char *msg, const MachineFunction *MF);
212 void report(const char *msg, const MachineBasicBlock *MBB);
213 void report(const char *msg, const MachineInstr *MI);
214 void report(const char *msg, const MachineOperand *MO, unsigned MONum);
215
216 void report_context(const LiveInterval &LI) const;
217 void report_context(const LiveRange &LR, unsigned VRegUnit,
218 LaneBitmask LaneMask) const;
219 void report_context(const LiveRange::Segment &S) const;
220 void report_context(const VNInfo &VNI) const;
221 void report_context(SlotIndex Pos) const;
222 void report_context_liverange(const LiveRange &LR) const;
223 void report_context_lanemask(LaneBitmask LaneMask) const;
224 void report_context_vreg(unsigned VReg) const;
225 void report_context_vreg_regunit(unsigned VRegOrRegUnit) const;
226
227 void verifyInlineAsm(const MachineInstr *MI);
228
229 void checkLiveness(const MachineOperand *MO, unsigned MONum);
230 void checkLivenessAtUse(const MachineOperand *MO, unsigned MONum,
231 SlotIndex UseIdx, const LiveRange &LR, unsigned Reg,
232 LaneBitmask LaneMask = 0);
233 void checkLivenessAtDef(const MachineOperand *MO, unsigned MONum,
234 SlotIndex DefIdx, const LiveRange &LR, unsigned Reg,
235 LaneBitmask LaneMask = 0);
236
237 void markReachable(const MachineBasicBlock *MBB);
238 void calcRegsPassed();
239 void checkPHIOps(const MachineBasicBlock *MBB);
240
241 void calcRegsRequired();
242 void verifyLiveVariables();
243 void verifyLiveIntervals();
244 void verifyLiveInterval(const LiveInterval&);
245 void verifyLiveRangeValue(const LiveRange&, const VNInfo*, unsigned,
246 unsigned);
247 void verifyLiveRangeSegment(const LiveRange&,
248 const LiveRange::const_iterator I, unsigned,
249 unsigned);
250 void verifyLiveRange(const LiveRange&, unsigned, LaneBitmask LaneMask = 0);
251
252 void verifyStackFrame();
253
254 void verifySlotIndexes() const;
255 void verifyProperties(const MachineFunction &MF);
256 };
257
258 struct MachineVerifierPass : public MachineFunctionPass {
259 static char ID; // Pass ID, replacement for typeid
260 const std::string Banner;
261
262 MachineVerifierPass(const std::string &banner = nullptr)
263 : MachineFunctionPass(ID), Banner(banner) {
264 initializeMachineVerifierPassPass(*PassRegistry::getPassRegistry());
265 }
266
267 void getAnalysisUsage(AnalysisUsage &AU) const override {
268 AU.setPreservesAll();
269 MachineFunctionPass::getAnalysisUsage(AU);
270 }
271
272 bool runOnMachineFunction(MachineFunction &MF) override {
273 unsigned FoundErrors = MachineVerifier(this, Banner.c_str()).verify(MF);
274 if (FoundErrors)
275 report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
276 return false;
277 }
278 };
279
280}
281
282char MachineVerifierPass::ID = 0;
283INITIALIZE_PASS(MachineVerifierPass, "machineverifier",
284 "Verify generated machine code", false, false)
285
286FunctionPass *llvm::createMachineVerifierPass(const std::string &Banner) {
287 return new MachineVerifierPass(Banner);
288}
289
290bool MachineFunction::verify(Pass *p, const char *Banner, bool AbortOnErrors)
291 const {
292 MachineFunction &MF = const_cast<MachineFunction&>(*this);
293 unsigned FoundErrors = MachineVerifier(p, Banner).verify(MF);
294 if (AbortOnErrors && FoundErrors)
295 report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
296 return FoundErrors == 0;
297}
298
299void MachineVerifier::verifySlotIndexes() const {
300 if (Indexes == nullptr)
301 return;
302
303 // Ensure the IdxMBB list is sorted by slot indexes.
304 SlotIndex Last;
305 for (SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin(),
306 E = Indexes->MBBIndexEnd(); I != E; ++I) {
307 assert(!Last.isValid() || I->first > Last);
308 Last = I->first;
309 }
310}
311
312void MachineVerifier::verifyProperties(const MachineFunction &MF) {
313 // If a pass has introduced virtual registers without clearing the
314 // NoVRegs property (or set it without allocating the vregs)
315 // then report an error.
316 if (MF.getProperties().hasProperty(
317 MachineFunctionProperties::Property::NoVRegs) &&
318 MRI->getNumVirtRegs())
319 report("Function has NoVRegs property but there are VReg operands", &MF);
320}
321
322unsigned MachineVerifier::verify(MachineFunction &MF) {
323 foundErrors = 0;
324
325 this->MF = &MF;
326 TM = &MF.getTarget();
327 TII = MF.getSubtarget().getInstrInfo();
328 TRI = MF.getSubtarget().getRegisterInfo();
329 MRI = &MF.getRegInfo();
330
331 isFunctionRegBankSelected = MF.getProperties().hasProperty(
332 MachineFunctionProperties::Property::RegBankSelected);
333 isFunctionSelected = MF.getProperties().hasProperty(
334 MachineFunctionProperties::Property::Selected);
335
336 LiveVars = nullptr;
337 LiveInts = nullptr;
338 LiveStks = nullptr;
339 Indexes = nullptr;
340 if (PASS) {
341 LiveInts = PASS->getAnalysisIfAvailable<LiveIntervals>();
342 // We don't want to verify LiveVariables if LiveIntervals is available.
343 if (!LiveInts)
344 LiveVars = PASS->getAnalysisIfAvailable<LiveVariables>();
345 LiveStks = PASS->getAnalysisIfAvailable<LiveStacks>();
346 Indexes = PASS->getAnalysisIfAvailable<SlotIndexes>();
347 }
348
349 verifySlotIndexes();
350
351 verifyProperties(MF);
352
353 visitMachineFunctionBefore();
354 for (MachineFunction::const_iterator MFI = MF.begin(), MFE = MF.end();
355 MFI!=MFE; ++MFI) {
356 visitMachineBasicBlockBefore(&*MFI);
357 // Keep track of the current bundle header.
358 const MachineInstr *CurBundle = nullptr;
359 // Do we expect the next instruction to be part of the same bundle?
360 bool InBundle = false;
361
362 for (MachineBasicBlock::const_instr_iterator MBBI = MFI->instr_begin(),
363 MBBE = MFI->instr_end(); MBBI != MBBE; ++MBBI) {
364 if (MBBI->getParent() != &*MFI) {
365 report("Bad instruction parent pointer", &*MFI);
366 errs() << "Instruction: " << *MBBI;
367 continue;
368 }
369
370 // Check for consistent bundle flags.
371 if (InBundle && !MBBI->isBundledWithPred())
372 report("Missing BundledPred flag, "
373 "BundledSucc was set on predecessor",
374 &*MBBI);
375 if (!InBundle && MBBI->isBundledWithPred())
376 report("BundledPred flag is set, "
377 "but BundledSucc not set on predecessor",
378 &*MBBI);
379
380 // Is this a bundle header?
381 if (!MBBI->isInsideBundle()) {
382 if (CurBundle)
383 visitMachineBundleAfter(CurBundle);
384 CurBundle = &*MBBI;
385 visitMachineBundleBefore(CurBundle);
386 } else if (!CurBundle)
387 report("No bundle header", &*MBBI);
388 visitMachineInstrBefore(&*MBBI);
389 for (unsigned I = 0, E = MBBI->getNumOperands(); I != E; ++I) {
390 const MachineInstr &MI = *MBBI;
391 const MachineOperand &Op = MI.getOperand(I);
392 if (Op.getParent() != &MI) {
393 // Make sure to use correct addOperand / RemoveOperand / ChangeTo
394 // functions when replacing operands of a MachineInstr.
395 report("Instruction has operand with wrong parent set", &MI);
396 }
397
398 visitMachineOperand(&Op, I);
399 }
400
401 visitMachineInstrAfter(&*MBBI);
402
403 // Was this the last bundled instruction?
404 InBundle = MBBI->isBundledWithSucc();
405 }
406 if (CurBundle)
407 visitMachineBundleAfter(CurBundle);
408 if (InBundle)
409 report("BundledSucc flag set on last instruction in block", &MFI->back());
410 visitMachineBasicBlockAfter(&*MFI);
411 }
412 visitMachineFunctionAfter();
413
414 // Clean up.
415 regsLive.clear();
416 regsDefined.clear();
417 regsDead.clear();
418 regsKilled.clear();
419 regMasks.clear();
420 regsLiveInButUnused.clear();
421 MBBInfoMap.clear();
422
423 return foundErrors;
424}
425
426void MachineVerifier::report(const char *msg, const MachineFunction *MF) {
427 assert(MF);
428 errs() << '\n';
429 if (!foundErrors++) {
430 if (Banner)
431 errs() << "# " << Banner << '\n';
432 if (LiveInts != nullptr)
433 LiveInts->print(errs());
434 else
435 MF->print(errs(), Indexes);
436 }
437 errs() << "*** Bad machine code: " << msg << " ***\n"
438 << "- function: " << MF->getName() << "\n";
439}
440
441void MachineVerifier::report(const char *msg, const MachineBasicBlock *MBB) {
442 assert(MBB);
443 report(msg, MBB->getParent());
444 errs() << "- basic block: BB#" << MBB->getNumber()
445 << ' ' << MBB->getName()
446 << " (" << (const void*)MBB << ')';
447 if (Indexes)
448 errs() << " [" << Indexes->getMBBStartIdx(MBB)
449 << ';' << Indexes->getMBBEndIdx(MBB) << ')';
450 errs() << '\n';
451}
452
453void MachineVerifier::report(const char *msg, const MachineInstr *MI) {
454 assert(MI);
455 report(msg, MI->getParent());
456 errs() << "- instruction: ";
457 if (Indexes && Indexes->hasIndex(*MI))
458 errs() << Indexes->getInstructionIndex(*MI) << '\t';
459 MI->print(errs(), /*SkipOpers=*/true);
460 errs() << '\n';
461}
462
463void MachineVerifier::report(const char *msg,
464 const MachineOperand *MO, unsigned MONum) {
465 assert(MO);
466 report(msg, MO->getParent());
467 errs() << "- operand " << MONum << ": ";
468 MO->print(errs(), TRI);
469 errs() << "\n";
470}
471
472void MachineVerifier::report_context(SlotIndex Pos) const {
473 errs() << "- at: " << Pos << '\n';
474}
475
476void MachineVerifier::report_context(const LiveInterval &LI) const {
477 errs() << "- interval: " << LI << '\n';
478}
479
480void MachineVerifier::report_context(const LiveRange &LR, unsigned VRegUnit,
481 LaneBitmask LaneMask) const {
482 report_context_liverange(LR);
483 report_context_vreg_regunit(VRegUnit);
484 if (LaneMask != 0)
485 report_context_lanemask(LaneMask);
486}
487
488void MachineVerifier::report_context(const LiveRange::Segment &S) const {
489 errs() << "- segment: " << S << '\n';
490}
491
492void MachineVerifier::report_context(const VNInfo &VNI) const {
493 errs() << "- ValNo: " << VNI.id << " (def " << VNI.def << ")\n";
494}
495
496void MachineVerifier::report_context_liverange(const LiveRange &LR) const {
497 errs() << "- liverange: " << LR << '\n';
498}
499
500void MachineVerifier::report_context_vreg(unsigned VReg) const {
501 errs() << "- v. register: " << PrintReg(VReg, TRI) << '\n';
502}
503
504void MachineVerifier::report_context_vreg_regunit(unsigned VRegOrUnit) const {
505 if (TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
506 report_context_vreg(VRegOrUnit);
507 } else {
508 errs() << "- regunit: " << PrintRegUnit(VRegOrUnit, TRI) << '\n';
509 }
510}
511
512void MachineVerifier::report_context_lanemask(LaneBitmask LaneMask) const {
513 errs() << "- lanemask: " << PrintLaneMask(LaneMask) << '\n';
514}
515
516void MachineVerifier::markReachable(const MachineBasicBlock *MBB) {
517 BBInfo &MInfo = MBBInfoMap[MBB];
518 if (!MInfo.reachable) {
519 MInfo.reachable = true;
520 for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
521 SuE = MBB->succ_end(); SuI != SuE; ++SuI)
522 markReachable(*SuI);
523 }
524}
525
526void MachineVerifier::visitMachineFunctionBefore() {
527 lastIndex = SlotIndex();
528 regsReserved = MRI->getReservedRegs();
529
530 // A sub-register of a reserved register is also reserved
531 for (int Reg = regsReserved.find_first(); Reg>=0;
532 Reg = regsReserved.find_next(Reg)) {
533 for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
534 // FIXME: This should probably be:
535 // assert(regsReserved.test(*SubRegs) && "Non-reserved sub-register");
536 regsReserved.set(*SubRegs);
537 }
538 }
539
540 markReachable(&MF->front());
541
542 // Build a set of the basic blocks in the function.
543 FunctionBlocks.clear();
544 for (const auto &MBB : *MF) {
545 FunctionBlocks.insert(&MBB);
546 BBInfo &MInfo = MBBInfoMap[&MBB];
547
548 MInfo.Preds.insert(MBB.pred_begin(), MBB.pred_end());
549 if (MInfo.Preds.size() != MBB.pred_size())
550 report("MBB has duplicate entries in its predecessor list.", &MBB);
551
552 MInfo.Succs.insert(MBB.succ_begin(), MBB.succ_end());
553 if (MInfo.Succs.size() != MBB.succ_size())
554 report("MBB has duplicate entries in its successor list.", &MBB);
555 }
556
557 // Check that the register use lists are sane.
558 MRI->verifyUseLists();
559
560 verifyStackFrame();
561}
562
563// Does iterator point to a and b as the first two elements?
564static bool matchPair(MachineBasicBlock::const_succ_iterator i,
565 const MachineBasicBlock *a, const MachineBasicBlock *b) {
566 if (*i == a)
567 return *++i == b;
568 if (*i == b)
569 return *++i == a;
570 return false;
571}
572
573void
574MachineVerifier::visitMachineBasicBlockBefore(const MachineBasicBlock *MBB) {
575 FirstTerminator = nullptr;
576
577 if (!MF->getProperties().hasProperty(
578 MachineFunctionProperties::Property::NoPHIs)) {
579 // If this block has allocatable physical registers live-in, check that
580 // it is an entry block or landing pad.
581 for (const auto &LI : MBB->liveins()) {
582 if (isAllocatable(LI.PhysReg) && !MBB->isEHPad() &&
583 MBB->getIterator() != MBB->getParent()->begin()) {
584 report("MBB has allocable live-in, but isn't entry or landing-pad.", MBB);
585 }
586 }
587 }
588
589 // Count the number of landing pad successors.
590 SmallPtrSet<MachineBasicBlock*, 4> LandingPadSuccs;
591 for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
592 E = MBB->succ_end(); I != E; ++I) {
593 if ((*I)->isEHPad())
594 LandingPadSuccs.insert(*I);
595 if (!FunctionBlocks.count(*I))
596 report("MBB has successor that isn't part of the function.", MBB);
597 if (!MBBInfoMap[*I].Preds.count(MBB)) {
598 report("Inconsistent CFG", MBB);
599 errs() << "MBB is not in the predecessor list of the successor BB#"
600 << (*I)->getNumber() << ".\n";
601 }
602 }
603
604 // Check the predecessor list.
605 for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
606 E = MBB->pred_end(); I != E; ++I) {
607 if (!FunctionBlocks.count(*I))
608 report("MBB has predecessor that isn't part of the function.", MBB);
609 if (!MBBInfoMap[*I].Succs.count(MBB)) {
610 report("Inconsistent CFG", MBB);
611 errs() << "MBB is not in the successor list of the predecessor BB#"
612 << (*I)->getNumber() << ".\n";
613 }
614 }
615
616 const MCAsmInfo *AsmInfo = TM->getMCAsmInfo();
617 const BasicBlock *BB = MBB->getBasicBlock();
618 const Function *Fn = MF->getFunction();
619 if (LandingPadSuccs.size() > 1 &&
620 !(AsmInfo &&
621 AsmInfo->getExceptionHandlingType() == ExceptionHandling::SjLj &&
622 BB && isa<SwitchInst>(BB->getTerminator())) &&
623 !isFuncletEHPersonality(classifyEHPersonality(Fn->getPersonalityFn())))
624 report("MBB has more than one landing pad successor", MBB);
625
626 // Call AnalyzeBranch. If it succeeds, there are several more conditions to check.
627 MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
628 SmallVector<MachineOperand, 4> Cond;
629 if (!TII->analyzeBranch(*const_cast<MachineBasicBlock *>(MBB), TBB, FBB,
630 Cond)) {
631 // Ok, AnalyzeBranch thinks it knows what's going on with this block. Let's
632 // check whether its answers match up with reality.
633 if (!TBB && !FBB) {
634 // Block falls through to its successor.
635 MachineFunction::const_iterator MBBI = MBB->getIterator();
636 ++MBBI;
637 if (MBBI == MF->end()) {
638 // It's possible that the block legitimately ends with a noreturn
639 // call or an unreachable, in which case it won't actually fall
640 // out the bottom of the function.
641 } else if (MBB->succ_size() == LandingPadSuccs.size()) {
642 // It's possible that the block legitimately ends with a noreturn
643 // call or an unreachable, in which case it won't actually fall
644 // out of the block.
645 } else if (MBB->succ_size() != 1+LandingPadSuccs.size()) {
646 report("MBB exits via unconditional fall-through but doesn't have "
647 "exactly one CFG successor!", MBB);
648 } else if (!MBB->isSuccessor(&*MBBI)) {
649 report("MBB exits via unconditional fall-through but its successor "
650 "differs from its CFG successor!", MBB);
651 }
652 if (!MBB->empty() && MBB->back().isBarrier() &&
653 !TII->isPredicated(MBB->back())) {
654 report("MBB exits via unconditional fall-through but ends with a "
655 "barrier instruction!", MBB);
656 }
657 if (!Cond.empty()) {
658 report("MBB exits via unconditional fall-through but has a condition!",
659 MBB);
660 }
661 } else if (TBB && !FBB && Cond.empty()) {
662 // Block unconditionally branches somewhere.
663 // If the block has exactly one successor, that happens to be a
664 // landingpad, accept it as valid control flow.
665 if (MBB->succ_size() != 1+LandingPadSuccs.size() &&
666 (MBB->succ_size() != 1 || LandingPadSuccs.size() != 1 ||
667 *MBB->succ_begin() != *LandingPadSuccs.begin())) {
668 report("MBB exits via unconditional branch but doesn't have "
669 "exactly one CFG successor!", MBB);
670 } else if (!MBB->isSuccessor(TBB)) {
671 report("MBB exits via unconditional branch but the CFG "
672 "successor doesn't match the actual successor!", MBB);
673 }
674 if (MBB->empty()) {
675 report("MBB exits via unconditional branch but doesn't contain "
676 "any instructions!", MBB);
677 } else if (!MBB->back().isBarrier()) {
678 report("MBB exits via unconditional branch but doesn't end with a "
679 "barrier instruction!", MBB);
680 } else if (!MBB->back().isTerminator()) {
681 report("MBB exits via unconditional branch but the branch isn't a "
682 "terminator instruction!", MBB);
683 }
684 } else if (TBB && !FBB && !Cond.empty()) {
685 // Block conditionally branches somewhere, otherwise falls through.
686 MachineFunction::const_iterator MBBI = MBB->getIterator();
687 ++MBBI;
688 if (MBBI == MF->end()) {
689 report("MBB conditionally falls through out of function!", MBB);
690 } else if (MBB->succ_size() == 1) {
691 // A conditional branch with only one successor is weird, but allowed.
692 if (&*MBBI != TBB)
693 report("MBB exits via conditional branch/fall-through but only has "
694 "one CFG successor!", MBB);
695 else if (TBB != *MBB->succ_begin())
696 report("MBB exits via conditional branch/fall-through but the CFG "
697 "successor doesn't match the actual successor!", MBB);
698 } else if (MBB->succ_size() != 2) {
699 report("MBB exits via conditional branch/fall-through but doesn't have "
700 "exactly two CFG successors!", MBB);
701 } else if (!matchPair(MBB->succ_begin(), TBB, &*MBBI)) {
702 report("MBB exits via conditional branch/fall-through but the CFG "
703 "successors don't match the actual successors!", MBB);
704 }
705 if (MBB->empty()) {
706 report("MBB exits via conditional branch/fall-through but doesn't "
707 "contain any instructions!", MBB);
708 } else if (MBB->back().isBarrier()) {
709 report("MBB exits via conditional branch/fall-through but ends with a "
710 "barrier instruction!", MBB);
711 } else if (!MBB->back().isTerminator()) {
712 report("MBB exits via conditional branch/fall-through but the branch "
713 "isn't a terminator instruction!", MBB);
714 }
715 } else if (TBB && FBB) {
716 // Block conditionally branches somewhere, otherwise branches
717 // somewhere else.
718 if (MBB->succ_size() == 1) {
719 // A conditional branch with only one successor is weird, but allowed.
720 if (FBB != TBB)
721 report("MBB exits via conditional branch/branch through but only has "
722 "one CFG successor!", MBB);
723 else if (TBB != *MBB->succ_begin())
724 report("MBB exits via conditional branch/branch through but the CFG "
725 "successor doesn't match the actual successor!", MBB);
726 } else if (MBB->succ_size() != 2) {
727 report("MBB exits via conditional branch/branch but doesn't have "
728 "exactly two CFG successors!", MBB);
729 } else if (!matchPair(MBB->succ_begin(), TBB, FBB)) {
730 report("MBB exits via conditional branch/branch but the CFG "
731 "successors don't match the actual successors!", MBB);
732 }
733 if (MBB->empty()) {
734 report("MBB exits via conditional branch/branch but doesn't "
735 "contain any instructions!", MBB);
736 } else if (!MBB->back().isBarrier()) {
737 report("MBB exits via conditional branch/branch but doesn't end with a "
738 "barrier instruction!", MBB);
739 } else if (!MBB->back().isTerminator()) {
740 report("MBB exits via conditional branch/branch but the branch "
741 "isn't a terminator instruction!", MBB);
742 }
743 if (Cond.empty()) {
744 report("MBB exits via conditional branch/branch but there's no "
745 "condition!", MBB);
746 }
747 } else {
748 report("AnalyzeBranch returned invalid data!", MBB);
749 }
750 }
751
752 regsLive.clear();
753 for (const auto &LI : MBB->liveins()) {
754 if (!TargetRegisterInfo::isPhysicalRegister(LI.PhysReg)) {
755 report("MBB live-in list contains non-physical register", MBB);
756 continue;
757 }
758 for (MCSubRegIterator SubRegs(LI.PhysReg, TRI, /*IncludeSelf=*/true);
759 SubRegs.isValid(); ++SubRegs)
760 regsLive.insert(*SubRegs);
761 }
762 regsLiveInButUnused = regsLive;
763
764 const MachineFrameInfo &MFI = MF->getFrameInfo();
765 BitVector PR = MFI.getPristineRegs(*MF);
766 for (int I = PR.find_first(); I>0; I = PR.find_next(I)) {
767 for (MCSubRegIterator SubRegs(I, TRI, /*IncludeSelf=*/true);
768 SubRegs.isValid(); ++SubRegs)
769 regsLive.insert(*SubRegs);
770 }
771
772 regsKilled.clear();
773 regsDefined.clear();
774
775 if (Indexes)
776 lastIndex = Indexes->getMBBStartIdx(MBB);
777}
778
779// This function gets called for all bundle headers, including normal
780// stand-alone unbundled instructions.
781void MachineVerifier::visitMachineBundleBefore(const MachineInstr *MI) {
782 if (Indexes && Indexes->hasIndex(*MI)) {
783 SlotIndex idx = Indexes->getInstructionIndex(*MI);
784 if (!(idx > lastIndex)) {
785 report("Instruction index out of order", MI);
786 errs() << "Last instruction was at " << lastIndex << '\n';
787 }
788 lastIndex = idx;
789 }
790
791 // Ensure non-terminators don't follow terminators.
792 // Ignore predicated terminators formed by if conversion.
793 // FIXME: If conversion shouldn't need to violate this rule.
794 if (MI->isTerminator() && !TII->isPredicated(*MI)) {
795 if (!FirstTerminator)
796 FirstTerminator = MI;
797 } else if (FirstTerminator) {
798 report("Non-terminator instruction after the first terminator", MI);
799 errs() << "First terminator was:\t" << *FirstTerminator;
800 }
801}
802
803// The operands on an INLINEASM instruction must follow a template.
804// Verify that the flag operands make sense.
805void MachineVerifier::verifyInlineAsm(const MachineInstr *MI) {
806 // The first two operands on INLINEASM are the asm string and global flags.
807 if (MI->getNumOperands() < 2) {
808 report("Too few operands on inline asm", MI);
809 return;
810 }
811 if (!MI->getOperand(0).isSymbol())
812 report("Asm string must be an external symbol", MI);
813 if (!MI->getOperand(1).isImm())
814 report("Asm flags must be an immediate", MI);
815 // Allowed flags are Extra_HasSideEffects = 1, Extra_IsAlignStack = 2,
816 // Extra_AsmDialect = 4, Extra_MayLoad = 8, and Extra_MayStore = 16,
817 // and Extra_IsConvergent = 32.
818 if (!isUInt<6>(MI->getOperand(1).getImm()))
819 report("Unknown asm flags", &MI->getOperand(1), 1);
820
821 static_assert(InlineAsm::MIOp_FirstOperand == 2, "Asm format changed");
822
823 unsigned OpNo = InlineAsm::MIOp_FirstOperand;
824 unsigned NumOps;
825 for (unsigned e = MI->getNumOperands(); OpNo < e; OpNo += NumOps) {
826 const MachineOperand &MO = MI->getOperand(OpNo);
827 // There may be implicit ops after the fixed operands.
828 if (!MO.isImm())
829 break;
830 NumOps = 1 + InlineAsm::getNumOperandRegisters(MO.getImm());
831 }
832
833 if (OpNo > MI->getNumOperands())
834 report("Missing operands in last group", MI);
835
836 // An optional MDNode follows the groups.
837 if (OpNo < MI->getNumOperands() && MI->getOperand(OpNo).isMetadata())
838 ++OpNo;
839
840 // All trailing operands must be implicit registers.
841 for (unsigned e = MI->getNumOperands(); OpNo < e; ++OpNo) {
842 const MachineOperand &MO = MI->getOperand(OpNo);
843 if (!MO.isReg() || !MO.isImplicit())
844 report("Expected implicit register after groups", &MO, OpNo);
845 }
846}
847
848void MachineVerifier::visitMachineInstrBefore(const MachineInstr *MI) {
849 const MCInstrDesc &MCID = MI->getDesc();
850 if (MI->getNumOperands() < MCID.getNumOperands()) {
851 report("Too few operands", MI);
852 errs() << MCID.getNumOperands() << " operands expected, but "
853 << MI->getNumOperands() << " given.\n";
854 }
855
856 if (MI->isPHI() && MF->getProperties().hasProperty(
857 MachineFunctionProperties::Property::NoPHIs))
858 report("Found PHI instruction with NoPHIs property set", MI);
859
860 // Check the tied operands.
861 if (MI->isInlineAsm())
862 verifyInlineAsm(MI);
863
864 // Check the MachineMemOperands for basic consistency.
865 for (MachineInstr::mmo_iterator I = MI->memoperands_begin(),
866 E = MI->memoperands_end(); I != E; ++I) {
867 if ((*I)->isLoad() && !MI->mayLoad())
868 report("Missing mayLoad flag", MI);
869 if ((*I)->isStore() && !MI->mayStore())
870 report("Missing mayStore flag", MI);
871 }
872
873 // Debug values must not have a slot index.
874 // Other instructions must have one, unless they are inside a bundle.
875 if (LiveInts) {
876 bool mapped = !LiveInts->isNotInMIMap(*MI);
877 if (MI->isDebugValue()) {
878 if (mapped)
879 report("Debug instruction has a slot index", MI);
880 } else if (MI->isInsideBundle()) {
881 if (mapped)
882 report("Instruction inside bundle has a slot index", MI);
883 } else {
884 if (!mapped)
885 report("Missing slot index", MI);
886 }
887 }
888
889 // Check types.
890 if (isPreISelGenericOpcode(MCID.getOpcode())) {
891 if (isFunctionSelected)
892 report("Unexpected generic instruction in a Selected function", MI);
893
894 // Generic instructions specify equality constraints between some
895 // of their operands. Make sure these are consistent.
896 SmallVector<LLT, 4> Types;
897 for (unsigned i = 0; i < MCID.getNumOperands(); ++i) {
898 if (!MCID.OpInfo[i].isGenericType())
899 continue;
900 size_t TypeIdx = MCID.OpInfo[i].getGenericTypeIndex();
901 Types.resize(std::max(TypeIdx + 1, Types.size()));
902
903 LLT OpTy = MRI->getType(MI->getOperand(i).getReg());
904 if (Types[TypeIdx].isValid() && Types[TypeIdx] != OpTy)
905 report("type mismatch in generic instruction", MI);
906 Types[TypeIdx] = OpTy;
907 }
908 }
909
910 // Generic opcodes must not have physical register operands.
911 if (isPreISelGenericOpcode(MCID.getOpcode())) {
912 for (auto &Op : MI->operands()) {
913 if (Op.isReg() && TargetRegisterInfo::isPhysicalRegister(Op.getReg()))
914 report("Generic instruction cannot have physical register", MI);
915 }
916 }
917
918 StringRef ErrorInfo;
919 if (!TII->verifyInstruction(*MI, ErrorInfo))
920 report(ErrorInfo.data(), MI);
921}
922
923void
924MachineVerifier::visitMachineOperand(const MachineOperand *MO, unsigned MONum) {
925 const MachineInstr *MI = MO->getParent();
926 const MCInstrDesc &MCID = MI->getDesc();
927 unsigned NumDefs = MCID.getNumDefs();
928 if (MCID.getOpcode() == TargetOpcode::PATCHPOINT)
929 NumDefs = (MONum == 0 && MO->isReg()) ? NumDefs : 0;
930
931 // The first MCID.NumDefs operands must be explicit register defines
932 if (MONum < NumDefs) {
933 const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
934 if (!MO->isReg())
935 report("Explicit definition must be a register", MO, MONum);
936 else if (!MO->isDef() && !MCOI.isOptionalDef())
937 report("Explicit definition marked as use", MO, MONum);
938 else if (MO->isImplicit())
939 report("Explicit definition marked as implicit", MO, MONum);
940 } else if (MONum < MCID.getNumOperands()) {
941 const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
942 // Don't check if it's the last operand in a variadic instruction. See,
943 // e.g., LDM_RET in the arm back end.
944 if (MO->isReg() &&
945 !(MI->isVariadic() && MONum == MCID.getNumOperands()-1)) {
946 if (MO->isDef() && !MCOI.isOptionalDef())
947 report("Explicit operand marked as def", MO, MONum);
948 if (MO->isImplicit())
949 report("Explicit operand marked as implicit", MO, MONum);
950 }
951
952 int TiedTo = MCID.getOperandConstraint(MONum, MCOI::TIED_TO);
953 if (TiedTo != -1) {
954 if (!MO->isReg())
955 report("Tied use must be a register", MO, MONum);
956 else if (!MO->isTied())
957 report("Operand should be tied", MO, MONum);
958 else if (unsigned(TiedTo) != MI->findTiedOperandIdx(MONum))
959 report("Tied def doesn't match MCInstrDesc", MO, MONum);
960 } else if (MO->isReg() && MO->isTied())
961 report("Explicit operand should not be tied", MO, MONum);
962 } else {
963 // ARM adds %reg0 operands to indicate predicates. We'll allow that.
964 if (MO->isReg() && !MO->isImplicit() && !MI->isVariadic() && MO->getReg())
965 report("Extra explicit operand on non-variadic instruction", MO, MONum);
966 }
967
968 switch (MO->getType()) {
969 case MachineOperand::MO_Register: {
970 const unsigned Reg = MO->getReg();
971 if (!Reg)
972 return;
973 if (MRI->tracksLiveness() && !MI->isDebugValue())
974 checkLiveness(MO, MONum);
975
976 // Verify the consistency of tied operands.
977 if (MO->isTied()) {
978 unsigned OtherIdx = MI->findTiedOperandIdx(MONum);
979 const MachineOperand &OtherMO = MI->getOperand(OtherIdx);
980 if (!OtherMO.isReg())
981 report("Must be tied to a register", MO, MONum);
982 if (!OtherMO.isTied())
983 report("Missing tie flags on tied operand", MO, MONum);
984 if (MI->findTiedOperandIdx(OtherIdx) != MONum)
985 report("Inconsistent tie links", MO, MONum);
986 if (MONum < MCID.getNumDefs()) {
987 if (OtherIdx < MCID.getNumOperands()) {
988 if (-1 == MCID.getOperandConstraint(OtherIdx, MCOI::TIED_TO))
989 report("Explicit def tied to explicit use without tie constraint",
990 MO, MONum);
991 } else {
992 if (!OtherMO.isImplicit())
993 report("Explicit def should be tied to implicit use", MO, MONum);
994 }
995 }
996 }
997
998 // Verify two-address constraints after leaving SSA form.
999 unsigned DefIdx;
1000 if (!MRI->isSSA() && MO->isUse() &&
1001 MI->isRegTiedToDefOperand(MONum, &DefIdx) &&
1002 Reg != MI->getOperand(DefIdx).getReg())
1003 report("Two-address instruction operands must be identical", MO, MONum);
1004
1005 // Check register classes.
1006 if (MONum < MCID.getNumOperands() && !MO->isImplicit()) {
1007 unsigned SubIdx = MO->getSubReg();
1008
1009 if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
1010 if (SubIdx) {
1011 report("Illegal subregister index for physical register", MO, MONum);
1012 return;
1013 }
1014 if (const TargetRegisterClass *DRC =
1015 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1016 if (!DRC->contains(Reg)) {
1017 report("Illegal physical register for instruction", MO, MONum);
1018 errs() << TRI->getName(Reg) << " is not a "
1019 << TRI->getRegClassName(DRC) << " register.\n";
1020 }
1021 }
1022 } else {
1023 // Virtual register.
1024 const TargetRegisterClass *RC = MRI->getRegClassOrNull(Reg);
1025 if (!RC) {
1026 // This is a generic virtual register.
1027
1028 // If we're post-Select, we can't have gvregs anymore.
1029 if (isFunctionSelected) {
1030 report("Generic virtual register invalid in a Selected function",
1031 MO, MONum);
1032 return;
1033 }
1034
1035 // The gvreg must have a size and it must not have a SubIdx.
1036 LLT Ty = MRI->getType(Reg);
1037 if (!Ty.isValid()) {
1038 report("Generic virtual register must have a valid type", MO,
1039 MONum);
1040 return;
1041 }
1042
1043 const RegisterBank *RegBank = MRI->getRegBankOrNull(Reg);
1044
1045 // If we're post-RegBankSelect, the gvreg must have a bank.
1046 if (!RegBank && isFunctionRegBankSelected) {
1047 report("Generic virtual register must have a bank in a "
1048 "RegBankSelected function",
1049 MO, MONum);
1050 return;
1051 }
1052
1053 // Make sure the register fits into its register bank if any.
1054 if (RegBank && Ty.isValid() &&
1055 RegBank->getSize() < Ty.getSizeInBits()) {
1056 report("Register bank is too small for virtual register", MO,
1057 MONum);
1058 errs() << "Register bank " << RegBank->getName() << " too small("
1059 << RegBank->getSize() << ") to fit " << Ty.getSizeInBits()
1060 << "-bits\n";
1061 return;
1062 }
1063 if (SubIdx) {
1064 report("Generic virtual register does not allow subregister index", MO,
1065 MONum);
1066 return;
1067 }
1068 break;
1069 }
1070 if (SubIdx) {
1071 const TargetRegisterClass *SRC =
1072 TRI->getSubClassWithSubReg(RC, SubIdx);
1073 if (!SRC) {
1074 report("Invalid subregister index for virtual register", MO, MONum);
1075 errs() << "Register class " << TRI->getRegClassName(RC)
1076 << " does not support subreg index " << SubIdx << "\n";
1077 return;
1078 }
1079 if (RC != SRC) {
1080 report("Invalid register class for subregister index", MO, MONum);
1081 errs() << "Register class " << TRI->getRegClassName(RC)
1082 << " does not fully support subreg index " << SubIdx << "\n";
1083 return;
1084 }
1085 }
1086 if (const TargetRegisterClass *DRC =
1087 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1088 if (SubIdx) {
1089 const TargetRegisterClass *SuperRC =
1090 TRI->getLargestLegalSuperClass(RC, *MF);
1091 if (!SuperRC) {
1092 report("No largest legal super class exists.", MO, MONum);
1093 return;
1094 }
1095 DRC = TRI->getMatchingSuperRegClass(SuperRC, DRC, SubIdx);
1096 if (!DRC) {
1097 report("No matching super-reg register class.", MO, MONum);
1098 return;
1099 }
1100 }
1101 if (!RC->hasSuperClassEq(DRC)) {
1102 report("Illegal virtual register for instruction", MO, MONum);
1103 errs() << "Expected a " << TRI->getRegClassName(DRC)
1104 << " register, but got a " << TRI->getRegClassName(RC)
1105 << " register\n";
1106 }
1107 }
1108 }
1109 }
1110 break;
1111 }
1112
1113 case MachineOperand::MO_RegisterMask:
1114 regMasks.push_back(MO->getRegMask());
1115 break;
1116
1117 case MachineOperand::MO_MachineBasicBlock:
1118 if (MI->isPHI() && !MO->getMBB()->isSuccessor(MI->getParent()))
1119 report("PHI operand is not in the CFG", MO, MONum);
1120 break;
1121
1122 case MachineOperand::MO_FrameIndex:
1123 if (LiveStks && LiveStks->hasInterval(MO->getIndex()) &&
1124 LiveInts && !LiveInts->isNotInMIMap(*MI)) {
1125 int FI = MO->getIndex();
1126 LiveInterval &LI = LiveStks->getInterval(FI);
1127 SlotIndex Idx = LiveInts->getInstructionIndex(*MI);
1128
1129 bool stores = MI->mayStore();
1130 bool loads = MI->mayLoad();
1131 // For a memory-to-memory move, we need to check if the frame
1132 // index is used for storing or loading, by inspecting the
1133 // memory operands.
1134 if (stores && loads) {
1135 for (auto *MMO : MI->memoperands()) {
1136 const PseudoSourceValue *PSV = MMO->getPseudoValue();
1137 if (PSV == nullptr) continue;
1138 const FixedStackPseudoSourceValue *Value =
1139 dyn_cast<FixedStackPseudoSourceValue>(PSV);
1140 if (Value == nullptr) continue;
1141 if (Value->getFrameIndex() != FI) continue;
1142
1143 if (MMO->isStore())
1144 loads = false;
1145 else
1146 stores = false;
1147 break;
1148 }
1149 if (loads == stores)
1150 report("Missing fixed stack memoperand.", MI);
1151 }
1152 if (loads && !LI.liveAt(Idx.getRegSlot(true))) {
1153 report("Instruction loads from dead spill slot", MO, MONum);
1154 errs() << "Live stack: " << LI << '\n';
1155 }
1156 if (stores && !LI.liveAt(Idx.getRegSlot())) {
1157 report("Instruction stores to dead spill slot", MO, MONum);
1158 errs() << "Live stack: " << LI << '\n';
1159 }
1160 }
1161 break;
1162
1163 default:
1164 break;
1165 }
1166}
1167
1168void MachineVerifier::checkLivenessAtUse(const MachineOperand *MO,
1169 unsigned MONum, SlotIndex UseIdx, const LiveRange &LR, unsigned VRegOrUnit,
1170 LaneBitmask LaneMask) {
1171 LiveQueryResult LRQ = LR.Query(UseIdx);
1172 // Check if we have a segment at the use, note however that we only need one
1173 // live subregister range, the others may be dead.
1174 if (!LRQ.valueIn() && LaneMask == 0) {
1175 report("No live segment at use", MO, MONum);
1176 report_context_liverange(LR);
1177 report_context_vreg_regunit(VRegOrUnit);
1178 report_context(UseIdx);
1179 }
1180 if (MO->isKill() && !LRQ.isKill()) {
1181 report("Live range continues after kill flag", MO, MONum);
1182 report_context_liverange(LR);
1183 report_context_vreg_regunit(VRegOrUnit);
1184 if (LaneMask != 0)
1185 report_context_lanemask(LaneMask);
1186 report_context(UseIdx);
1187 }
1188}
1189
1190void MachineVerifier::checkLivenessAtDef(const MachineOperand *MO,
1191 unsigned MONum, SlotIndex DefIdx, const LiveRange &LR, unsigned VRegOrUnit,
1192 LaneBitmask LaneMask) {
1193 if (const VNInfo *VNI = LR.getVNInfoAt(DefIdx)) {
1194 assert(VNI && "NULL valno is not allowed");
1195 if (VNI->def != DefIdx) {
1196 report("Inconsistent valno->def", MO, MONum);
1197 report_context_liverange(LR);
1198 report_context_vreg_regunit(VRegOrUnit);
1199 if (LaneMask != 0)
1200 report_context_lanemask(LaneMask);
1201 report_context(*VNI);
1202 report_context(DefIdx);
1203 }
1204 } else {
1205 report("No live segment at def", MO, MONum);
1206 report_context_liverange(LR);
1207 report_context_vreg_regunit(VRegOrUnit);
1208 if (LaneMask != 0)
1209 report_context_lanemask(LaneMask);
1210 report_context(DefIdx);
1211 }
1212 // Check that, if the dead def flag is present, LiveInts agree.
1213 if (MO->isDead()) {
1214 LiveQueryResult LRQ = LR.Query(DefIdx);
1215 if (!LRQ.isDeadDef()) {
1216 // In case of physregs we can have a non-dead definition on another
1217 // operand.
1218 bool otherDef = false;
1219 if (!TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
1220 const MachineInstr &MI = *MO->getParent();
1221 for (const MachineOperand &MO : MI.operands()) {
1222 if (!MO.isReg() || !MO.isDef() || MO.isDead())
1223 continue;
1224 unsigned Reg = MO.getReg();
1225 for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
1226 if (*Units == VRegOrUnit) {
1227 otherDef = true;
1228 break;
1229 }
1230 }
1231 }
1232 }
1233
1234 if (!otherDef) {
1235 report("Live range continues after dead def flag", MO, MONum);
1236 report_context_liverange(LR);
1237 report_context_vreg_regunit(VRegOrUnit);
1238 if (LaneMask != 0)
1239 report_context_lanemask(LaneMask);
1240 }
1241 }
1242 }
1243}
1244
1245void MachineVerifier::checkLiveness(const MachineOperand *MO, unsigned MONum) {
1246 const MachineInstr *MI = MO->getParent();
1247 const unsigned Reg = MO->getReg();
1248
1249 // Both use and def operands can read a register.
1250 if (MO->readsReg()) {
1251 regsLiveInButUnused.erase(Reg);
1252
1253 if (MO->isKill())
1254 addRegWithSubRegs(regsKilled, Reg);
1255
1256 // Check that LiveVars knows this kill.
1257 if (LiveVars && TargetRegisterInfo::isVirtualRegister(Reg) &&
1258 MO->isKill()) {
1259 LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
1260 if (!is_contained(VI.Kills, MI))
1261 report("Kill missing from LiveVariables", MO, MONum);
1262 }
1263
1264 // Check LiveInts liveness and kill.
1265 if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
1266 SlotIndex UseIdx = LiveInts->getInstructionIndex(*MI);
1267 // Check the cached regunit intervals.
1268 if (TargetRegisterInfo::isPhysicalRegister(Reg) && !isReserved(Reg)) {
1269 for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
1270 if (const LiveRange *LR = LiveInts->getCachedRegUnit(*Units))
1271 checkLivenessAtUse(MO, MONum, UseIdx, *LR, *Units);
1272 }
1273 }
1274
1275 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
1276 if (LiveInts->hasInterval(Reg)) {
1277 // This is a virtual register interval.
1278 const LiveInterval &LI = LiveInts->getInterval(Reg);
1279 checkLivenessAtUse(MO, MONum, UseIdx, LI, Reg);
1280
1281 if (LI.hasSubRanges() && !MO->isDef()) {
1282 unsigned SubRegIdx = MO->getSubReg();
1283 LaneBitmask MOMask = SubRegIdx != 0
1284 ? TRI->getSubRegIndexLaneMask(SubRegIdx)
1285 : MRI->getMaxLaneMaskForVReg(Reg);
1286 LaneBitmask LiveInMask = 0;
1287 for (const LiveInterval::SubRange &SR : LI.subranges()) {
1288 if ((MOMask & SR.LaneMask) == 0)
1289 continue;
1290 checkLivenessAtUse(MO, MONum, UseIdx, SR, Reg, SR.LaneMask);
1291 LiveQueryResult LRQ = SR.Query(UseIdx);
1292 if (LRQ.valueIn())
1293 LiveInMask |= SR.LaneMask;
1294 }
1295 // At least parts of the register have to be live at the use.
1296 if ((LiveInMask & MOMask) == 0) {
1297 report("No live subrange at use", MO, MONum);
1298 report_context(LI);
1299 report_context(UseIdx);
1300 }
1301 }
1302 } else {
1303 report("Virtual register has no live interval", MO, MONum);
1304 }
1305 }
1306 }
1307
1308 // Use of a dead register.
1309 if (!regsLive.count(Reg)) {
1310 if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
1311 // Reserved registers may be used even when 'dead'.
1312 bool Bad = !isReserved(Reg);
1313 // We are fine if just any subregister has a defined value.
1314 if (Bad) {
1315 for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid();
1316 ++SubRegs) {
1317 if (regsLive.count(*SubRegs)) {
1318 Bad = false;
1319 break;
1320 }
1321 }
1322 }
1323 // If there is an additional implicit-use of a super register we stop
1324 // here. By definition we are fine if the super register is not
1325 // (completely) dead, if the complete super register is dead we will
1326 // get a report for its operand.
1327 if (Bad) {
1328 for (const MachineOperand &MOP : MI->uses()) {
1329 if (!MOP.isReg())
1330 continue;
1331 if (!MOP.isImplicit())
1332 continue;
1333 for (MCSubRegIterator SubRegs(MOP.getReg(), TRI); SubRegs.isValid();
1334 ++SubRegs) {
1335 if (*SubRegs == Reg) {
1336 Bad = false;
1337 break;
1338 }
1339 }
1340 }
1341 }
1342 if (Bad)
1343 report("Using an undefined physical register", MO, MONum);
1344 } else if (MRI->def_empty(Reg)) {
1345 report("Reading virtual register without a def", MO, MONum);
1346 } else {
1347 BBInfo &MInfo = MBBInfoMap[MI->getParent()];
1348 // We don't know which virtual registers are live in, so only complain
1349 // if vreg was killed in this MBB. Otherwise keep track of vregs that
1350 // must be live in. PHI instructions are handled separately.
1351 if (MInfo.regsKilled.count(Reg))
1352 report("Using a killed virtual register", MO, MONum);
1353 else if (!MI->isPHI())
1354 MInfo.vregsLiveIn.insert(std::make_pair(Reg, MI));
1355 }
1356 }
1357 }
1358
1359 if (MO->isDef()) {
1360 // Register defined.
1361 // TODO: verify that earlyclobber ops are not used.
1362 if (MO->isDead())
1363 addRegWithSubRegs(regsDead, Reg);
1364 else
1365 addRegWithSubRegs(regsDefined, Reg);
1366
1367 // Verify SSA form.
1368 if (MRI->isSSA() && TargetRegisterInfo::isVirtualRegister(Reg) &&
1369 std::next(MRI->def_begin(Reg)) != MRI->def_end())
1370 report("Multiple virtual register defs in SSA form", MO, MONum);
1371
1372 // Check LiveInts for a live segment, but only for virtual registers.
1373 if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
1374 SlotIndex DefIdx = LiveInts->getInstructionIndex(*MI);
1375 DefIdx = DefIdx.getRegSlot(MO->isEarlyClobber());
1376
1377 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
1378 if (LiveInts->hasInterval(Reg)) {
1379 const LiveInterval &LI = LiveInts->getInterval(Reg);
1380 checkLivenessAtDef(MO, MONum, DefIdx, LI, Reg);
1381
1382 if (LI.hasSubRanges()) {
1383 unsigned SubRegIdx = MO->getSubReg();
1384 LaneBitmask MOMask = SubRegIdx != 0
1385 ? TRI->getSubRegIndexLaneMask(SubRegIdx)
1386 : MRI->getMaxLaneMaskForVReg(Reg);
1387 for (const LiveInterval::SubRange &SR : LI.subranges()) {
1388 if ((SR.LaneMask & MOMask) == 0)
1389 continue;
1390 checkLivenessAtDef(MO, MONum, DefIdx, SR, Reg, SR.LaneMask);
1391 }
1392 }
1393 } else {
1394 report("Virtual register has no live interval", MO, MONum);
1395 }
1396 }
1397 }
1398 }
1399}
1400
1401void MachineVerifier::visitMachineInstrAfter(const MachineInstr *MI) {
1402}
1403
1404// This function gets called after visiting all instructions in a bundle. The
1405// argument points to the bundle header.
1406// Normal stand-alone instructions are also considered 'bundles', and this
1407// function is called for all of them.
1408void MachineVerifier::visitMachineBundleAfter(const MachineInstr *MI) {
1409 BBInfo &MInfo = MBBInfoMap[MI->getParent()];
1410 set_union(MInfo.regsKilled, regsKilled);
1411 set_subtract(regsLive, regsKilled); regsKilled.clear();
1412 // Kill any masked registers.
1413 while (!regMasks.empty()) {
1414 const uint32_t *Mask = regMasks.pop_back_val();
1415 for (RegSet::iterator I = regsLive.begin(), E = regsLive.end(); I != E; ++I)
1416 if (TargetRegisterInfo::isPhysicalRegister(*I) &&
1417 MachineOperand::clobbersPhysReg(Mask, *I))
1418 regsDead.push_back(*I);
1419 }
1420 set_subtract(regsLive, regsDead); regsDead.clear();
1421 set_union(regsLive, regsDefined); regsDefined.clear();
1422}
1423
1424void
1425MachineVerifier::visitMachineBasicBlockAfter(const MachineBasicBlock *MBB) {
1426 MBBInfoMap[MBB].regsLiveOut = regsLive;
1427 regsLive.clear();
1428
1429 if (Indexes) {
1430 SlotIndex stop = Indexes->getMBBEndIdx(MBB);
1431 if (!(stop > lastIndex)) {
1432 report("Block ends before last instruction index", MBB);
1433 errs() << "Block ends at " << stop
1434 << " last instruction was at " << lastIndex << '\n';
1435 }
1436 lastIndex = stop;
1437 }
1438}
1439
1440// Calculate the largest possible vregsPassed sets. These are the registers that
1441// can pass through an MBB live, but may not be live every time. It is assumed
1442// that all vregsPassed sets are empty before the call.
1443void MachineVerifier::calcRegsPassed() {
1444 // First push live-out regs to successors' vregsPassed. Remember the MBBs that
1445 // have any vregsPassed.
1446 SmallPtrSet<const MachineBasicBlock*, 8> todo;
1447 for (const auto &MBB : *MF) {
1448 BBInfo &MInfo = MBBInfoMap[&MBB];
1449 if (!MInfo.reachable)
1450 continue;
1451 for (MachineBasicBlock::const_succ_iterator SuI = MBB.succ_begin(),
1452 SuE = MBB.succ_end(); SuI != SuE; ++SuI) {
1453 BBInfo &SInfo = MBBInfoMap[*SuI];
1454 if (SInfo.addPassed(MInfo.regsLiveOut))
1455 todo.insert(*SuI);
1456 }
1457 }
1458
1459 // Iteratively push vregsPassed to successors. This will converge to the same
1460 // final state regardless of DenseSet iteration order.
1461 while (!todo.empty()) {
1462 const MachineBasicBlock *MBB = *todo.begin();
1463 todo.erase(MBB);
1464 BBInfo &MInfo = MBBInfoMap[MBB];
1465 for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
1466 SuE = MBB->succ_end(); SuI != SuE; ++SuI) {
1467 if (*SuI == MBB)
1468 continue;
1469 BBInfo &SInfo = MBBInfoMap[*SuI];
1470 if (SInfo.addPassed(MInfo.vregsPassed))
1471 todo.insert(*SuI);
1472 }
1473 }
1474}
1475
1476// Calculate the set of virtual registers that must be passed through each basic
1477// block in order to satisfy the requirements of successor blocks. This is very
1478// similar to calcRegsPassed, only backwards.
1479void MachineVerifier::calcRegsRequired() {
1480 // First push live-in regs to predecessors' vregsRequired.
1481 SmallPtrSet<const MachineBasicBlock*, 8> todo;
1482 for (const auto &MBB : *MF) {
1483 BBInfo &MInfo = MBBInfoMap[&MBB];
1484 for (MachineBasicBlock::const_pred_iterator PrI = MBB.pred_begin(),
1485 PrE = MBB.pred_end(); PrI != PrE; ++PrI) {
1486 BBInfo &PInfo = MBBInfoMap[*PrI];
1487 if (PInfo.addRequired(MInfo.vregsLiveIn))
1488 todo.insert(*PrI);
1489 }
1490 }
1491
1492 // Iteratively push vregsRequired to predecessors. This will converge to the
1493 // same final state regardless of DenseSet iteration order.
1494 while (!todo.empty()) {
1495 const MachineBasicBlock *MBB = *todo.begin();
1496 todo.erase(MBB);
1497 BBInfo &MInfo = MBBInfoMap[MBB];
1498 for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
1499 PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
1500 if (*PrI == MBB)
1501 continue;
1502 BBInfo &SInfo = MBBInfoMap[*PrI];
1503 if (SInfo.addRequired(MInfo.vregsRequired))
1504 todo.insert(*PrI);
1505 }
1506 }
1507}
1508
1509// Check PHI instructions at the beginning of MBB. It is assumed that
1510// calcRegsPassed has been run so BBInfo::isLiveOut is valid.
1511void MachineVerifier::checkPHIOps(const MachineBasicBlock *MBB) {
1512 SmallPtrSet<const MachineBasicBlock*, 8> seen;
1513 for (const auto &BBI : *MBB) {
1514 if (!BBI.isPHI())
1515 break;
1516 seen.clear();
1517
1518 for (unsigned i = 1, e = BBI.getNumOperands(); i != e; i += 2) {
1519 unsigned Reg = BBI.getOperand(i).getReg();
1520 const MachineBasicBlock *Pre = BBI.getOperand(i + 1).getMBB();
1521 if (!Pre->isSuccessor(MBB))
1522 continue;
1523 seen.insert(Pre);
1524 BBInfo &PrInfo = MBBInfoMap[Pre];
1525 if (PrInfo.reachable && !PrInfo.isLiveOut(Reg))
1526 report("PHI operand is not live-out from predecessor",
1527 &BBI.getOperand(i), i);
1528 }
1529
1530 // Did we see all predecessors?
1531 for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
1532 PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
1533 if (!seen.count(*PrI)) {
1534 report("Missing PHI operand", &BBI);
1535 errs() << "BB#" << (*PrI)->getNumber()
1536 << " is a predecessor according to the CFG.\n";
1537 }
1538 }
1539 }
1540}
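checkPHIOps leans on the machine-level PHI operand layout: operand 0 is the def, and the remaining operands come in (incoming register, predecessor MBB) pairs, which is why the loop starts at i = 1 and steps by 2. Illustrative only, not a line from this file:

    %vreg2<def> = PHI %vreg0, <BB#1>, %vreg1, <BB#2>

Here getOperand(1)/getOperand(2) name the value arriving from BB#1 and getOperand(3)/getOperand(4) the value arriving from BB#2.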
1541
1542void MachineVerifier::visitMachineFunctionAfter() {
1543 calcRegsPassed();
1544
1545 for (const auto &MBB : *MF) {
1546 BBInfo &MInfo = MBBInfoMap[&MBB];
1547
1548 // Skip unreachable MBBs.
1549 if (!MInfo.reachable)
1550 continue;
1551
1552 checkPHIOps(&MBB);
1553 }
1554
1555 // Now check liveness info if available
1556 calcRegsRequired();
1557
1558 // Check for killed virtual registers that should be live out.
1559 for (const auto &MBB : *MF) {
1560 BBInfo &MInfo = MBBInfoMap[&MBB];
1561 for (RegSet::iterator
1562 I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end(); I != E;
1563 ++I)
1564 if (MInfo.regsKilled.count(*I)) {
1565 report("Virtual register killed in block, but needed live out.", &MBB);
1566 errs() << "Virtual register " << PrintReg(*I)
1567 << " is used after the block.\n";
1568 }
1569 }
1570
1571 if (!MF->empty()) {
1572 BBInfo &MInfo = MBBInfoMap[&MF->front()];
1573 for (RegSet::iterator
1574 I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end(); I != E;
1575 ++I) {
1576 report("Virtual register defs don't dominate all uses.", MF);
1577 report_context_vreg(*I);
1578 }
1579 }
1580
1581 if (LiveVars)
1582 verifyLiveVariables();
1583 if (LiveInts)
1584 verifyLiveIntervals();
1585}
1586
1587void MachineVerifier::verifyLiveVariables() {
1588 assert(LiveVars && "Don't call verifyLiveVariables without LiveVars");
1589 for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
1590 unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
1591 LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
1592 for (const auto &MBB : *MF) {
1593 BBInfo &MInfo = MBBInfoMap[&MBB];
1594
1595 // Our vregsRequired should be identical to LiveVariables' AliveBlocks
1596 if (MInfo.vregsRequired.count(Reg)) {
1597 if (!VI.AliveBlocks.test(MBB.getNumber())) {
1598 report("LiveVariables: Block missing from AliveBlocks", &MBB);
1599 errs() << "Virtual register " << PrintReg(Reg)
1600 << " must be live through the block.\n";
1601 }
1602 } else {
1603 if (VI.AliveBlocks.test(MBB.getNumber())) {
1604 report("LiveVariables: Block should not be in AliveBlocks", &MBB);
1605 errs() << "Virtual register " << PrintReg(Reg)
1606 << " is not needed live through the block.\n";
1607 }
1608 }
1609 }
1610 }
1611}
1612
1613void MachineVerifier::verifyLiveIntervals() {
1614 assert(LiveInts && "Don't call verifyLiveIntervals without LiveInts");
1615 for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
1616 unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
1617
1618 // Spilling and splitting may leave unused registers around. Skip them.
1619 if (MRI->reg_nodbg_empty(Reg))
1620 continue;
1621
1622 if (!LiveInts->hasInterval(Reg)) {
1623 report("Missing live interval for virtual register", MF);
1624 errs() << PrintReg(Reg, TRI) << " still has defs or uses\n";
1625 continue;
1626 }
1627
1628 const LiveInterval &LI = LiveInts->getInterval(Reg);
1629 assert(Reg == LI.reg && "Invalid reg to interval mapping");
1630 verifyLiveInterval(LI);
1631 }
1632
1633 // Verify all the cached regunit intervals.
1634 for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
1635 if (const LiveRange *LR = LiveInts->getCachedRegUnit(i))
1636 verifyLiveRange(*LR, i);
1637}
1638
1639void MachineVerifier::verifyLiveRangeValue(const LiveRange &LR,
1640 const VNInfo *VNI, unsigned Reg,
1641 LaneBitmask LaneMask) {
1642 if (VNI->isUnused())
1643 return;
1644
1645 const VNInfo *DefVNI = LR.getVNInfoAt(VNI->def);
1646
1647 if (!DefVNI) {
1648 report("Value not live at VNInfo def and not marked unused", MF);
1649 report_context(LR, Reg, LaneMask);
1650 report_context(*VNI);
1651 return;
1652 }
1653
1654 if (DefVNI != VNI) {
1655 report("Live segment at def has different VNInfo", MF);
1656 report_context(LR, Reg, LaneMask);
1657 report_context(*VNI);
1658 return;
1659 }
1660
1661 const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(VNI->def);
1662 if (!MBB) {
1663 report("Invalid VNInfo definition index", MF);
1664 report_context(LR, Reg, LaneMask);
1665 report_context(*VNI);
1666 return;
1667 }
1668
1669 if (VNI->isPHIDef()) {
1670 if (VNI->def != LiveInts->getMBBStartIdx(MBB)) {
1671 report("PHIDef VNInfo is not defined at MBB start", MBB);
1672 report_context(LR, Reg, LaneMask);
1673 report_context(*VNI);
1674 }
1675 return;
1676 }
1677
1678 // Non-PHI def.
1679 const MachineInstr *MI = LiveInts->getInstructionFromIndex(VNI->def);
1680 if (!MI) {
1681 report("No instruction at VNInfo def index", MBB);
1682 report_context(LR, Reg, LaneMask);
1683 report_context(*VNI);
1684 return;
1685 }
1686
1687 if (Reg != 0) {
1688 bool hasDef = false;
1689 bool isEarlyClobber = false;
1690 for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
1691 if (!MOI->isReg() || !MOI->isDef())
1692 continue;
1693 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
1694 if (MOI->getReg() != Reg)
1695 continue;
1696 } else {
1697 if (!TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) ||
1698 !TRI->hasRegUnit(MOI->getReg(), Reg))
1699 continue;
1700 }
1701 if (LaneMask != 0 &&
1702 (TRI->getSubRegIndexLaneMask(MOI->getSubReg()) & LaneMask) == 0)
1703 continue;
1704 hasDef = true;
1705 if (MOI->isEarlyClobber())
1706 isEarlyClobber = true;
1707 }
1708
1709 if (!hasDef) {
1710 report("Defining instruction does not modify register", MI);
1711 report_context(LR, Reg, LaneMask);
1712 report_context(*VNI);
1713 }
1714
1715 // Early clobber defs begin at USE slots, but other defs must begin at
1716 // DEF slots.
1717 if (isEarlyClobber) {
1718 if (!VNI->def.isEarlyClobber()) {
1719 report("Early clobber def must be at an early-clobber slot", MBB);
1720 report_context(LR, Reg, LaneMask);
1721 report_context(*VNI);
1722 }
1723 } else if (!VNI->def.isRegister()) {
1724 report("Non-PHI, non-early clobber def must be at a register slot", MBB);
1725 report_context(LR, Reg, LaneMask);
1726 report_context(*VNI);
1727 }
1728 }
1729}
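Background for the slot checks above and in the segment checks that follow (a brief summary of SlotIndex conventions, not text from this file): each instruction owns four consecutive slots, Block (B), EarlyClobber (e), Register (r) and Dead (d). PHI values are defined at the block's B slot, early-clobber defs at e, ordinary defs at r, and a dead value's segment ends at the d slot, which is the distinction drawn by the isPHIDef, isEarlyClobber, isRegister and isDead checks here.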
1730
1731void MachineVerifier::verifyLiveRangeSegment(const LiveRange &LR,
1732 const LiveRange::const_iterator I,
1733 unsigned Reg, LaneBitmask LaneMask)
1734{
1735 const LiveRange::Segment &S = *I;
1736 const VNInfo *VNI = S.valno;
1737 assert(VNI && "Live segment has no valno");
1738
1739 if (VNI->id >= LR.getNumValNums() || VNI != LR.getValNumInfo(VNI->id)) {
1) Assuming the condition is false
2) Taking false branch
1740 report("Foreign valno in live segment", MF);
1741 report_context(LR, Reg, LaneMask);
1742 report_context(S);
1743 report_context(*VNI);
1744 }
1745
1746 if (VNI->isUnused()) {
3) Taking false branch
1747 report("Live segment valno is marked unused", MF);
1748 report_context(LR, Reg, LaneMask);
1749 report_context(S);
1750 }
1751
1752 const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(S.start);
1753 if (!MBB) {
4) Taking false branch
1754 report("Bad start of live segment, no basic block", MF);
1755 report_context(LR, Reg, LaneMask);
1756 report_context(S);
1757 return;
1758 }
1759 SlotIndex MBBStartIdx = LiveInts->getMBBStartIdx(MBB);
1760 if (S.start != MBBStartIdx && S.start != VNI->def) {
1761 report("Live segment must begin at MBB entry or valno def", MBB);
1762 report_context(LR, Reg, LaneMask);
1763 report_context(S);
1764 }
1765
1766 const MachineBasicBlock *EndMBB =
1767 LiveInts->getMBBFromIndex(S.end.getPrevSlot());
1768 if (!EndMBB) {
5) Taking false branch
1769 report("Bad end of live segment, no basic block", MF);
1770 report_context(LR, Reg, LaneMask);
1771 report_context(S);
1772 return;
1773 }
1774
1775 // No more checks for live-out segments.
1776 if (S.end == LiveInts->getMBBEndIdx(EndMBB))
6) Taking false branch
1777 return;
1778
1779 // RegUnit intervals are allowed dead phis.
1780 if (!TargetRegisterInfo::isVirtualRegister(Reg) && VNI->isPHIDef() &&
7) Taking false branch
1781 S.start == VNI->def && S.end == VNI->def.getDeadSlot())
1782 return;
1783
1784 // The live segment is ending inside EndMBB
1785 const MachineInstr *MI =
1786 LiveInts->getInstructionFromIndex(S.end.getPrevSlot());
1787 if (!MI) {
8) Assuming 'MI' is non-null
9) Taking false branch
1788 report("Live segment doesn't end at a valid instruction", EndMBB);
1789 report_context(LR, Reg, LaneMask);
1790 report_context(S);
1791 return;
1792 }
1793
1794 // The block slot must refer to a basic block boundary.
1795 if (S.end.isBlock()) {
10) Taking false branch
1796 report("Live segment ends at B slot of an instruction", EndMBB);
1797 report_context(LR, Reg, LaneMask);
1798 report_context(S);
1799 }
1800
1801 if (S.end.isDead()) {
11) Taking false branch
1802 // Segment ends on the dead slot.
1803 // That means there must be a dead def.
1804 if (!SlotIndex::isSameInstr(S.start, S.end)) {
1805 report("Live segment ending at dead slot spans instructions", EndMBB);
1806 report_context(LR, Reg, LaneMask);
1807 report_context(S);
1808 }
1809 }
1810
1811 // A live segment can only end at an early-clobber slot if it is being
1812 // redefined by an early-clobber def.
1813 if (S.end.isEarlyClobber()) {
12) Taking false branch
1814 if (I+1 == LR.end() || (I+1)->start != S.end) {
1815 report("Live segment ending at early clobber slot must be "
1816 "redefined by an EC def in the same instruction", EndMBB);
1817 report_context(LR, Reg, LaneMask);
1818 report_context(S);
1819 }
1820 }
1821
1822 // The following checks only apply to virtual registers. Physreg liveness
1823 // is too weird to check.
1824 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
13) Taking false branch
1825 // A live segment can end with either a redefinition, a kill flag on a
1826 // use, or a dead flag on a def.
1827 bool hasRead = false;
1828 bool hasSubRegDef = false;
1829 bool hasDeadDef = false;
1830 for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
1831 if (!MOI->isReg() || MOI->getReg() != Reg)
1832 continue;
1833 unsigned Sub = MOI->getSubReg();
1834 LaneBitmask SLM = Sub != 0 ? TRI->getSubRegIndexLaneMask(Sub) : ~0U;
1835 if (MOI->isDef()) {
1836 if (Sub != 0) {
1837 hasSubRegDef = true;
1838 // An operand vreg0:sub0<def> reads vreg0:sub1..n. Invert the lane
1839 // mask for subregister defs. Read-undef defs will be handled by
1840 // readsReg below.
1841 SLM = ~SLM;
1842 }
1843 if (MOI->isDead())
1844 hasDeadDef = true;
1845 }
1846 if (LaneMask != 0 && !(LaneMask & SLM))
1847 continue;
1848 if (MOI->readsReg())
1849 hasRead = true;
1850 }
1851 if (S.end.isDead()) {
1852 // Make sure that the corresponding machine operand for a "dead" live
1853 // range has the dead flag. We cannot perform this check for subregister
1854 // liveranges as partially dead values are allowed.
1855 if (LaneMask == 0 && !hasDeadDef) {
1856 report("Instruction ending live segment on dead slot has no dead flag",
1857 MI);
1858 report_context(LR, Reg, LaneMask);
1859 report_context(S);
1860 }
1861 } else {
1862 if (!hasRead) {
1863 // When tracking subregister liveness, the main range must start new
1864 // values on partial register writes, even if there is no read.
1865 if (!MRI->shouldTrackSubRegLiveness(Reg) || LaneMask != 0 ||
1866 !hasSubRegDef) {
1867 report("Instruction ending live segment doesn't read the register",
1868 MI);
1869 report_context(LR, Reg, LaneMask);
1870 report_context(S);
1871 }
1872 }
1873 }
1874 }
1875
1876 // Now check all the basic blocks in this live segment.
1877 MachineFunction::const_iterator MFI = MBB->getIterator();
1878 // Is this live segment the beginning of a non-PHIDef VN?
1879 if (S.start == VNI->def && !VNI->isPHIDef()) {
1880 // Not live-in to any blocks.
1881 if (MBB == EndMBB)
1882 return;
1883 // Skip this block.
1884 ++MFI;
1885 }
1886 for (;;) {
14) Loop condition is true. Entering loop body
20) Loop condition is true. Entering loop body
26) Loop condition is true. Entering loop body
32) Loop condition is true. Entering loop body
1887 assert(LiveInts->isLiveInToMBB(LR, &*MFI));
1888 // We don't know how to track physregs into a landing pad.
1889 if (!TargetRegisterInfo::isVirtualRegister(Reg) &&
16) Taking false branch
22) Taking false branch
28) Taking false branch
34) Taking false branch
1890 MFI->isEHPad()) {
15) Assuming the condition is false
21) Assuming the condition is false
27) Assuming the condition is false
33) Assuming the condition is false
1891 if (&*MFI == EndMBB)
1892 break;
1893 ++MFI;
1894 continue;
1895 }
1896
1897 // Is VNI a PHI-def in the current block?
1898 bool IsPHI = VNI->isPHIDef() &&
1899 VNI->def == LiveInts->getMBBStartIdx(&*MFI);
1900
1901 // Check that VNI is live-out of all predecessors.
1902 for (MachineBasicBlock::const_pred_iterator PI = MFI->pred_begin(),
17) Loop condition is false. Execution continues on line 1929
23) Loop condition is false. Execution continues on line 1929
29) Loop condition is false. Execution continues on line 1929
35) Loop condition is true. Entering loop body
1903 PE = MFI->pred_end(); PI != PE; ++PI) {
1904 SlotIndex PEnd = LiveInts->getMBBEndIdx(*PI);
1905 const VNInfo *PVNI = LR.getVNInfoBefore(PEnd);
36) 'PVNI' initialized here
1906
1907 // All predecessors must have a live-out value if this is not a
1908 // subregister liverange.
1909 if (!PVNI && LaneMask == 0) {
37) Assuming 'PVNI' is null
38) Assuming 'LaneMask' is not equal to 0
39) Taking false branch
1910 report("Register not marked live out of predecessor", *PI);
1911 report_context(LR, Reg, LaneMask);
1912 report_context(*VNI);
1913 errs() << " live into BB#" << MFI->getNumber()
1914 << '@' << LiveInts->getMBBStartIdx(&*MFI) << ", not live before "
1915 << PEnd << '\n';
1916 continue;
1917 }
1918
1919 // Only PHI-defs can take different predecessor values.
1920 if (!IsPHI && PVNI != VNI) {
40) Taking true branch
1921 report("Different value live out of predecessor", *PI);
1922 report_context(LR, Reg, LaneMask);
1923 errs() << "Valno #" << PVNI->id << " live out of BB#"
41) Access to field 'id' results in a dereference of a null pointer (loaded from variable 'PVNI')
1924 << (*PI)->getNumber() << '@' << PEnd << "\nValno #" << VNI->id
1925 << " live into BB#" << MFI->getNumber() << '@'
1926 << LiveInts->getMBBStartIdx(&*MFI) << '\n';
1927 }
1928 }
1929 if (&*MFI == EndMBB)
18) Assuming the condition is false
19) Taking false branch
24) Assuming the condition is false
25) Taking false branch
30) Assuming the condition is false
31) Taking false branch
1930 break;
1931 ++MFI;
1932 }
1933}
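On the path recorded above, PVNI is null at line 1923: step 37 assumes LR.getVNInfoBefore(PEnd) returned null, step 38 assumes LaneMask is non-zero so the "Register not marked live out of predecessor" diagnostic is skipped, and step 40 still enters the PVNI != VNI branch, whose message dereferences PVNI. A defensive rewrite of that diagnostic could look like the sketch below; the identifiers are taken from the listing, but this is only one possible guard, not necessarily the change applied upstream:

    // Only PHI-defs can take different predecessor values.
    if (!IsPHI && PVNI != VNI) {
      report("Different value live out of predecessor", *PI);
      report_context(LR, Reg, LaneMask);
      if (PVNI)
        errs() << "Valno #" << PVNI->id << " live out of BB#"
               << (*PI)->getNumber() << '@' << PEnd << '\n';
      else
        // PVNI can be null here when LaneMask != 0 (subregister ranges).
        errs() << "No valno live out of BB#" << (*PI)->getNumber()
               << '@' << PEnd << '\n';
      errs() << "Valno #" << VNI->id << " live into BB#" << MFI->getNumber()
             << '@' << LiveInts->getMBBStartIdx(&*MFI) << '\n';
    }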
1934
1935void MachineVerifier::verifyLiveRange(const LiveRange &LR, unsigned Reg,
1936 LaneBitmask LaneMask) {
1937 for (const VNInfo *VNI : LR.valnos)
1938 verifyLiveRangeValue(LR, VNI, Reg, LaneMask);
1939
1940 for (LiveRange::const_iterator I = LR.begin(), E = LR.end(); I != E; ++I)
1941 verifyLiveRangeSegment(LR, I, Reg, LaneMask);
1942}
1943
1944void MachineVerifier::verifyLiveInterval(const LiveInterval &LI) {
1945 unsigned Reg = LI.reg;
1946 assert(TargetRegisterInfo::isVirtualRegister(Reg));
1947 verifyLiveRange(LI, Reg);
1948
1949 LaneBitmask Mask = 0;
1950 LaneBitmask MaxMask = MRI->getMaxLaneMaskForVReg(Reg);
1951 for (const LiveInterval::SubRange &SR : LI.subranges()) {
1952 if ((Mask & SR.LaneMask) != 0) {
1953 report("Lane masks of sub ranges overlap in live interval", MF);
1954 report_context(LI);
1955 }
1956 if ((SR.LaneMask & ~MaxMask) != 0) {
1957 report("Subrange lanemask is invalid", MF);
1958 report_context(LI);
1959 }
1960 if (SR.empty()) {
1961 report("Subrange must not be empty", MF);
1962 report_context(SR, LI.reg, SR.LaneMask);
1963 }
1964 Mask |= SR.LaneMask;
1965 verifyLiveRange(SR, LI.reg, SR.LaneMask);
1966 if (!LI.covers(SR)) {
1967 report("A Subrange is not covered by the main range", MF);
1968 report_context(LI);
1969 }
1970 }
1971
1972 // Check the LI only has one connected component.
1973 ConnectedVNInfoEqClasses ConEQ(*LiveInts);
1974 unsigned NumComp = ConEQ.Classify(LI);
1975 if (NumComp > 1) {
1976 report("Multiple connected components in live interval", MF);
1977 report_context(LI);
1978 for (unsigned comp = 0; comp != NumComp; ++comp) {
1979 errs() << comp << ": valnos";
1980 for (LiveInterval::const_vni_iterator I = LI.vni_begin(),
1981 E = LI.vni_end(); I!=E; ++I)
1982 if (comp == ConEQ.getEqClass(*I))
1983 errs() << ' ' << (*I)->id;
1984 errs() << '\n';
1985 }
1986 }
1987}
1988
1989namespace {
1990 // FrameSetup and FrameDestroy can have zero adjustment, so using a single
1991 // integer, we can't tell whether it is a FrameSetup or FrameDestroy if the
1992 // value is zero.
1993 // We use a bool plus an integer to capture the stack state.
1994 struct StackStateOfBB {
1995 StackStateOfBB() : EntryValue(0), ExitValue(0), EntryIsSetup(false),
1996 ExitIsSetup(false) { }
1997 StackStateOfBB(int EntryVal, int ExitVal, bool EntrySetup, bool ExitSetup) :
1998 EntryValue(EntryVal), ExitValue(ExitVal), EntryIsSetup(EntrySetup),
1999 ExitIsSetup(ExitSetup) { }
2000 // Can be negative, which means we are setting up a frame.
2001 int EntryValue;
2002 int ExitValue;
2003 bool EntryIsSetup;
2004 bool ExitIsSetup;
2005 };
2006}
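As a small worked example of the state the struct above tracks, consider a block whose only stack traffic is one FrameSetup <16> / FrameDestroy <16> pair (the size 16 is made up for illustration); verifyStackFrame below updates the state like this:

    StackStateOfBB BB;      // entry state: (0, false)
    // FrameSetup <16>: record that a frame is being set up.
    BB.ExitValue -= 16;     // ExitValue == -16
    BB.ExitIsSetup = true;
    // FrameDestroy <16>: the size must match |ExitValue|, then the adjustment
    // is undone.
    BB.ExitValue += 16;     // ExitValue == 0
    BB.ExitIsSetup = false;
    // A return block is only accepted with ExitValue == 0 and !ExitIsSetup.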
2007
2008/// Make sure on every path through the CFG, a FrameSetup <n> is always followed
2009/// by a FrameDestroy <n>, stack adjustments are identical on all
2010/// CFG edges to a merge point, and frame is destroyed at end of a return block.
2011void MachineVerifier::verifyStackFrame() {
2012 unsigned FrameSetupOpcode = TII->getCallFrameSetupOpcode();
2013 unsigned FrameDestroyOpcode = TII->getCallFrameDestroyOpcode();
2014
2015 SmallVector<StackStateOfBB, 8> SPState;
2016 SPState.resize(MF->getNumBlockIDs());
2017 df_iterator_default_set<const MachineBasicBlock*> Reachable;
2018
2019 // Visit the MBBs in DFS order.
2020 for (df_ext_iterator<const MachineFunction*,
2021 df_iterator_default_set<const MachineBasicBlock*> >
2022 DFI = df_ext_begin(MF, Reachable), DFE = df_ext_end(MF, Reachable);
2023 DFI != DFE; ++DFI) {
2024 const MachineBasicBlock *MBB = *DFI;
2025
2026 StackStateOfBB BBState;
2027 // Check the exit state of the DFS stack predecessor.
2028 if (DFI.getPathLength() >= 2) {
2029 const MachineBasicBlock *StackPred = DFI.getPath(DFI.getPathLength() - 2);
2030 assert(Reachable.count(StackPred) &&
2031 "DFS stack predecessor is already visited.\n");
2032 BBState.EntryValue = SPState[StackPred->getNumber()].ExitValue;
2033 BBState.EntryIsSetup = SPState[StackPred->getNumber()].ExitIsSetup;
2034 BBState.ExitValue = BBState.EntryValue;
2035 BBState.ExitIsSetup = BBState.EntryIsSetup;
2036 }
2037
2038 // Update stack state by checking contents of MBB.
2039 for (const auto &I : *MBB) {
2040 if (I.getOpcode() == FrameSetupOpcode) {
2041 // The first operand of a FrameOpcode should be i32.
2042 int Size = I.getOperand(0).getImm();
2043 assert(Size >= 0 &&
2044 "Value should be non-negative in FrameSetup and FrameDestroy.\n");
2045
2046 if (BBState.ExitIsSetup)
2047 report("FrameSetup is after another FrameSetup", &I);
2048 BBState.ExitValue -= Size;
2049 BBState.ExitIsSetup = true;
2050 }
2051
2052 if (I.getOpcode() == FrameDestroyOpcode) {
2053 // The first operand of a FrameOpcode should be i32.
2054 int Size = I.getOperand(0).getImm();
2055 assert(Size >= 0 &&
2056 "Value should be non-negative in FrameSetup and FrameDestroy.\n");
2057
2058 if (!BBState.ExitIsSetup)
2059 report("FrameDestroy is not after a FrameSetup", &I);
2060 int AbsSPAdj = BBState.ExitValue < 0 ? -BBState.ExitValue :
2061 BBState.ExitValue;
2062 if (BBState.ExitIsSetup && AbsSPAdj != Size) {
2063 report("FrameDestroy <n> is after FrameSetup <m>", &I);
2064 errs() << "FrameDestroy <" << Size << "> is after FrameSetup <"
2065 << AbsSPAdj << ">.\n";
2066 }
2067 BBState.ExitValue += Size;
2068 BBState.ExitIsSetup = false;
2069 }
2070 }
2071 SPState[MBB->getNumber()] = BBState;
2072
2073 // Make sure the exit state of any predecessor is consistent with the entry
2074 // state.
2075 for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
2076 E = MBB->pred_end(); I != E; ++I) {
2077 if (Reachable.count(*I) &&
2078 (SPState[(*I)->getNumber()].ExitValue != BBState.EntryValue ||
2079 SPState[(*I)->getNumber()].ExitIsSetup != BBState.EntryIsSetup)) {
2080 report("The exit stack state of a predecessor is inconsistent.", MBB);
2081 errs() << "Predecessor BB#" << (*I)->getNumber() << " has exit state ("
2082 << SPState[(*I)->getNumber()].ExitValue << ", "
2083 << SPState[(*I)->getNumber()].ExitIsSetup
2084 << "), while BB#" << MBB->getNumber() << " has entry state ("
2085 << BBState.EntryValue << ", " << BBState.EntryIsSetup << ").\n";
2086 }
2087 }
2088
2089 // Make sure the entry state of any successor is consistent with the exit
2090 // state.
2091 for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
2092 E = MBB->succ_end(); I != E; ++I) {
2093 if (Reachable.count(*I) &&
2094 (SPState[(*I)->getNumber()].EntryValue != BBState.ExitValue ||
2095 SPState[(*I)->getNumber()].EntryIsSetup != BBState.ExitIsSetup)) {
2096 report("The entry stack state of a successor is inconsistent.", MBB);
2097 errs() << "Successor BB#" << (*I)->getNumber() << " has entry state ("
2098 << SPState[(*I)->getNumber()].EntryValue << ", "
2099 << SPState[(*I)->getNumber()].EntryIsSetup
2100 << "), while BB#" << MBB->getNumber() << " has exit state ("
2101 << BBState.ExitValue << ", " << BBState.ExitIsSetup << ").\n";
2102 }
2103 }
2104
2105 // Make sure a basic block with return ends with zero stack adjustment.
2106 if (!MBB->empty() && MBB->back().isReturn()) {
2107 if (BBState.ExitIsSetup)
2108 report("A return block ends with a FrameSetup.", MBB);
2109 if (BBState.ExitValue)
2110 report("A return block ends with a nonzero stack adjustment.", MBB);
2111 }
2112 }
2113}