Bug Summary

File: lib/CodeGen/MachineVerifier.cpp
Warning: line 1940, column 32
Access to field 'id' results in a dereference of a null pointer (loaded from variable 'PVNI')

Annotated Source Code

1//===-- MachineVerifier.cpp - Machine Code Verifier -----------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// Pass to verify generated machine code. The following is checked:
11//
12// Operand counts: All explicit operands must be present.
13//
14// Register classes: All physical and virtual register operands must be
15// compatible with the register class required by the instruction descriptor.
16//
17// Register live intervals: Registers must be defined only once, and must be
18// defined before use.
19//
20// The machine code verifier is enabled from LLVMTargetMachine.cpp with the
21// command-line option -verify-machineinstrs, or by defining the environment
22// variable LLVM_VERIFY_MACHINEINSTRS to the name of a file that will receive
23// the verifier errors.
24//===----------------------------------------------------------------------===//
25
26#include "llvm/CodeGen/Passes.h"
27#include "llvm/ADT/DenseSet.h"
28#include "llvm/ADT/DepthFirstIterator.h"
29#include "llvm/ADT/SetOperations.h"
30#include "llvm/ADT/SmallVector.h"
31#include "llvm/Analysis/EHPersonalities.h"
32#include "llvm/CodeGen/LiveIntervalAnalysis.h"
33#include "llvm/CodeGen/LiveStackAnalysis.h"
34#include "llvm/CodeGen/LiveVariables.h"
35#include "llvm/CodeGen/MachineFrameInfo.h"
36#include "llvm/CodeGen/MachineFunctionPass.h"
37#include "llvm/CodeGen/MachineMemOperand.h"
38#include "llvm/CodeGen/MachineRegisterInfo.h"
39#include "llvm/IR/BasicBlock.h"
40#include "llvm/IR/InlineAsm.h"
41#include "llvm/IR/Instructions.h"
42#include "llvm/MC/MCAsmInfo.h"
43#include "llvm/Support/Debug.h"
44#include "llvm/Support/ErrorHandling.h"
45#include "llvm/Support/FileSystem.h"
46#include "llvm/Support/raw_ostream.h"
47#include "llvm/Target/TargetInstrInfo.h"
48#include "llvm/Target/TargetMachine.h"
49#include "llvm/Target/TargetRegisterInfo.h"
50#include "llvm/Target/TargetSubtargetInfo.h"
51using namespace llvm;
52
53namespace {
54 struct MachineVerifier {
55
56 MachineVerifier(Pass *pass, const char *b) :
57 PASS(pass),
58 Banner(b)
59 {}
60
61 unsigned verify(MachineFunction &MF);
62
63 Pass *const PASS;
64 const char *Banner;
65 const MachineFunction *MF;
66 const TargetMachine *TM;
67 const TargetInstrInfo *TII;
68 const TargetRegisterInfo *TRI;
69 const MachineRegisterInfo *MRI;
70
71 unsigned foundErrors;
72
73 // Avoid querying the MachineFunctionProperties for each operand.
74 bool isFunctionRegBankSelected;
75 bool isFunctionSelected;
76
77 typedef SmallVector<unsigned, 16> RegVector;
78 typedef SmallVector<const uint32_t*, 4> RegMaskVector;
79 typedef DenseSet<unsigned> RegSet;
80 typedef DenseMap<unsigned, const MachineInstr*> RegMap;
81 typedef SmallPtrSet<const MachineBasicBlock*, 8> BlockSet;
82
83 const MachineInstr *FirstTerminator;
84 BlockSet FunctionBlocks;
85
86 BitVector regsReserved;
87 RegSet regsLive;
88 RegVector regsDefined, regsDead, regsKilled;
89 RegMaskVector regMasks;
90 RegSet regsLiveInButUnused;
91
92 SlotIndex lastIndex;
93
94 // Add Reg and any sub-registers to RV
95 void addRegWithSubRegs(RegVector &RV, unsigned Reg) {
96 RV.push_back(Reg);
97 if (TargetRegisterInfo::isPhysicalRegister(Reg))
98 for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
99 RV.push_back(*SubRegs);
100 }
101
102 struct BBInfo {
103 // Is this MBB reachable from the MF entry point?
104 bool reachable;
105
106 // Vregs that must be live in because they are used without being
107 // defined. Map value is the user.
108 RegMap vregsLiveIn;
109
110 // Regs killed in MBB. They may be defined again, and will then be in both
111 // regsKilled and regsLiveOut.
112 RegSet regsKilled;
113
114 // Regs defined in MBB and live out. Note that vregs passing through may
115 // be live out without being mentioned here.
116 RegSet regsLiveOut;
117
118 // Vregs that pass through MBB untouched. This set is disjoint from
119 // regsKilled and regsLiveOut.
120 RegSet vregsPassed;
121
122 // Vregs that must pass through MBB because they are needed by a successor
123 // block. This set is disjoint from regsLiveOut.
124 RegSet vregsRequired;
125
126 // Set versions of block's predecessor and successor lists.
127 BlockSet Preds, Succs;
128
129 BBInfo() : reachable(false) {}
130
131 // Add register to vregsPassed if it belongs there. Return true if
132 // anything changed.
133 bool addPassed(unsigned Reg) {
134 if (!TargetRegisterInfo::isVirtualRegister(Reg))
135 return false;
136 if (regsKilled.count(Reg) || regsLiveOut.count(Reg))
137 return false;
138 return vregsPassed.insert(Reg).second;
139 }
140
141 // Same for a full set.
142 bool addPassed(const RegSet &RS) {
143 bool changed = false;
144 for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
145 if (addPassed(*I))
146 changed = true;
147 return changed;
148 }
149
150 // Add register to vregsRequired if it belongs there. Return true if
151 // anything changed.
152 bool addRequired(unsigned Reg) {
153 if (!TargetRegisterInfo::isVirtualRegister(Reg))
154 return false;
155 if (regsLiveOut.count(Reg))
156 return false;
157 return vregsRequired.insert(Reg).second;
158 }
159
160 // Same for a full set.
161 bool addRequired(const RegSet &RS) {
162 bool changed = false;
163 for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
164 if (addRequired(*I))
165 changed = true;
166 return changed;
167 }
168
169 // Same for a full map.
170 bool addRequired(const RegMap &RM) {
171 bool changed = false;
172 for (RegMap::const_iterator I = RM.begin(), E = RM.end(); I != E; ++I)
173 if (addRequired(I->first))
174 changed = true;
175 return changed;
176 }
177
178 // Live-out registers are either in regsLiveOut or vregsPassed.
179 bool isLiveOut(unsigned Reg) const {
180 return regsLiveOut.count(Reg) || vregsPassed.count(Reg);
181 }
182 };
183
184 // Extra register info per MBB.
185 DenseMap<const MachineBasicBlock*, BBInfo> MBBInfoMap;
186
187 bool isReserved(unsigned Reg) {
188 return Reg < regsReserved.size() && regsReserved.test(Reg);
189 }
190
191 bool isAllocatable(unsigned Reg) {
192 return Reg < TRI->getNumRegs() && MRI->isAllocatable(Reg);
193 }
194
195 // Analysis information if available
196 LiveVariables *LiveVars;
197 LiveIntervals *LiveInts;
198 LiveStacks *LiveStks;
199 SlotIndexes *Indexes;
200
201 void visitMachineFunctionBefore();
202 void visitMachineBasicBlockBefore(const MachineBasicBlock *MBB);
203 void visitMachineBundleBefore(const MachineInstr *MI);
204 void visitMachineInstrBefore(const MachineInstr *MI);
205 void visitMachineOperand(const MachineOperand *MO, unsigned MONum);
206 void visitMachineInstrAfter(const MachineInstr *MI);
207 void visitMachineBundleAfter(const MachineInstr *MI);
208 void visitMachineBasicBlockAfter(const MachineBasicBlock *MBB);
209 void visitMachineFunctionAfter();
210
211 void report(const char *msg, const MachineFunction *MF);
212 void report(const char *msg, const MachineBasicBlock *MBB);
213 void report(const char *msg, const MachineInstr *MI);
214 void report(const char *msg, const MachineOperand *MO, unsigned MONum);
215
216 void report_context(const LiveInterval &LI) const;
217 void report_context(const LiveRange &LR, unsigned VRegUnit,
218 LaneBitmask LaneMask) const;
219 void report_context(const LiveRange::Segment &S) const;
220 void report_context(const VNInfo &VNI) const;
221 void report_context(SlotIndex Pos) const;
222 void report_context_liverange(const LiveRange &LR) const;
223 void report_context_lanemask(LaneBitmask LaneMask) const;
224 void report_context_vreg(unsigned VReg) const;
225 void report_context_vreg_regunit(unsigned VRegOrRegUnit) const;
226
227 void verifyInlineAsm(const MachineInstr *MI);
228
229 void checkLiveness(const MachineOperand *MO, unsigned MONum);
230 void checkLivenessAtUse(const MachineOperand *MO, unsigned MONum,
231 SlotIndex UseIdx, const LiveRange &LR, unsigned Reg,
232 LaneBitmask LaneMask = LaneBitmask::getNone());
233 void checkLivenessAtDef(const MachineOperand *MO, unsigned MONum,
234 SlotIndex DefIdx, const LiveRange &LR, unsigned Reg,
235 LaneBitmask LaneMask = LaneBitmask::getNone());
236
237 void markReachable(const MachineBasicBlock *MBB);
238 void calcRegsPassed();
239 void checkPHIOps(const MachineBasicBlock *MBB);
240
241 void calcRegsRequired();
242 void verifyLiveVariables();
243 void verifyLiveIntervals();
244 void verifyLiveInterval(const LiveInterval&);
245 void verifyLiveRangeValue(const LiveRange&, const VNInfo*, unsigned,
246 LaneBitmask);
247 void verifyLiveRangeSegment(const LiveRange&,
248 const LiveRange::const_iterator I, unsigned,
249 LaneBitmask);
250 void verifyLiveRange(const LiveRange&, unsigned,
251 LaneBitmask LaneMask = LaneBitmask::getNone());
252
253 void verifyStackFrame();
254
255 void verifySlotIndexes() const;
256 void verifyProperties(const MachineFunction &MF);
257 };
258
259 struct MachineVerifierPass : public MachineFunctionPass {
260 static char ID; // Pass ID, replacement for typeid
261 const std::string Banner;
262
263 MachineVerifierPass(const std::string &banner = nullptr)
264 : MachineFunctionPass(ID), Banner(banner) {
265 initializeMachineVerifierPassPass(*PassRegistry::getPassRegistry());
266 }
267
268 void getAnalysisUsage(AnalysisUsage &AU) const override {
269 AU.setPreservesAll();
270 MachineFunctionPass::getAnalysisUsage(AU);
271 }
272
273 bool runOnMachineFunction(MachineFunction &MF) override {
274 unsigned FoundErrors = MachineVerifier(this, Banner.c_str()).verify(MF);
275 if (FoundErrors)
276 report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
277 return false;
278 }
279 };
280
281}
282
283char MachineVerifierPass::ID = 0;
284INITIALIZE_PASS(MachineVerifierPass, "machineverifier",static void *initializeMachineVerifierPassPassOnce(PassRegistry
&Registry) { PassInfo *PI = new PassInfo( "Verify generated machine code"
, "machineverifier", &MachineVerifierPass::ID, PassInfo::
NormalCtor_t(callDefaultCtor<MachineVerifierPass>), false
, false); Registry.registerPass(*PI, true); return PI; } static
llvm::once_flag InitializeMachineVerifierPassPassFlag; void llvm
::initializeMachineVerifierPassPass(PassRegistry &Registry
) { llvm::call_once(InitializeMachineVerifierPassPassFlag, initializeMachineVerifierPassPassOnce
, std::ref(Registry)); }
285 "Verify generated machine code", false, false)static void *initializeMachineVerifierPassPassOnce(PassRegistry
&Registry) { PassInfo *PI = new PassInfo( "Verify generated machine code"
, "machineverifier", &MachineVerifierPass::ID, PassInfo::
NormalCtor_t(callDefaultCtor<MachineVerifierPass>), false
, false); Registry.registerPass(*PI, true); return PI; } static
llvm::once_flag InitializeMachineVerifierPassPassFlag; void llvm
::initializeMachineVerifierPassPass(PassRegistry &Registry
) { llvm::call_once(InitializeMachineVerifierPassPassFlag, initializeMachineVerifierPassPassOnce
, std::ref(Registry)); }
286
287FunctionPass *llvm::createMachineVerifierPass(const std::string &Banner) {
288 return new MachineVerifierPass(Banner);
289}
290
291bool MachineFunction::verify(Pass *p, const char *Banner, bool AbortOnErrors)
292 const {
293 MachineFunction &MF = const_cast<MachineFunction&>(*this);
294 unsigned FoundErrors = MachineVerifier(p, Banner).verify(MF);
295 if (AbortOnErrors && FoundErrors)
296 report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
297 return FoundErrors == 0;
298}
299
300void MachineVerifier::verifySlotIndexes() const {
301 if (Indexes == nullptr)
302 return;
303
304 // Ensure the IdxMBB list is sorted by slot indexes.
305 SlotIndex Last;
306 for (SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin(),
307 E = Indexes->MBBIndexEnd(); I != E; ++I) {
308 assert(!Last.isValid() || I->first > Last)((!Last.isValid() || I->first > Last) ? static_cast<
void> (0) : __assert_fail ("!Last.isValid() || I->first > Last"
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 308, __PRETTY_FUNCTION__))
;
309 Last = I->first;
310 }
311}
312
313void MachineVerifier::verifyProperties(const MachineFunction &MF) {
314 // If a pass has introduced virtual registers without clearing the
315 // NoVRegs property (or set it without allocating the vregs)
316 // then report an error.
317 if (MF.getProperties().hasProperty(
318 MachineFunctionProperties::Property::NoVRegs) &&
319 MRI->getNumVirtRegs())
320 report("Function has NoVRegs property but there are VReg operands", &MF);
321}
322
323unsigned MachineVerifier::verify(MachineFunction &MF) {
324 foundErrors = 0;
325
326 this->MF = &MF;
327 TM = &MF.getTarget();
328 TII = MF.getSubtarget().getInstrInfo();
329 TRI = MF.getSubtarget().getRegisterInfo();
330 MRI = &MF.getRegInfo();
331
332 isFunctionRegBankSelected = MF.getProperties().hasProperty(
333 MachineFunctionProperties::Property::RegBankSelected);
334 isFunctionSelected = MF.getProperties().hasProperty(
335 MachineFunctionProperties::Property::Selected);
336
337 LiveVars = nullptr;
338 LiveInts = nullptr;
339 LiveStks = nullptr;
340 Indexes = nullptr;
341 if (PASS) {
342 LiveInts = PASS->getAnalysisIfAvailable<LiveIntervals>();
343 // We don't want to verify LiveVariables if LiveIntervals is available.
344 if (!LiveInts)
345 LiveVars = PASS->getAnalysisIfAvailable<LiveVariables>();
346 LiveStks = PASS->getAnalysisIfAvailable<LiveStacks>();
347 Indexes = PASS->getAnalysisIfAvailable<SlotIndexes>();
348 }
349
350 verifySlotIndexes();
351
352 verifyProperties(MF);
353
354 visitMachineFunctionBefore();
355 for (MachineFunction::const_iterator MFI = MF.begin(), MFE = MF.end();
356 MFI!=MFE; ++MFI) {
357 visitMachineBasicBlockBefore(&*MFI);
358 // Keep track of the current bundle header.
359 const MachineInstr *CurBundle = nullptr;
360 // Do we expect the next instruction to be part of the same bundle?
361 bool InBundle = false;
362
363 for (MachineBasicBlock::const_instr_iterator MBBI = MFI->instr_begin(),
364 MBBE = MFI->instr_end(); MBBI != MBBE; ++MBBI) {
365 if (MBBI->getParent() != &*MFI) {
366 report("Bad instruction parent pointer", &*MFI);
367 errs() << "Instruction: " << *MBBI;
368 continue;
369 }
370
371 // Check for consistent bundle flags.
372 if (InBundle && !MBBI->isBundledWithPred())
373 report("Missing BundledPred flag, "
374 "BundledSucc was set on predecessor",
375 &*MBBI);
376 if (!InBundle && MBBI->isBundledWithPred())
377 report("BundledPred flag is set, "
378 "but BundledSucc not set on predecessor",
379 &*MBBI);
380
381 // Is this a bundle header?
382 if (!MBBI->isInsideBundle()) {
383 if (CurBundle)
384 visitMachineBundleAfter(CurBundle);
385 CurBundle = &*MBBI;
386 visitMachineBundleBefore(CurBundle);
387 } else if (!CurBundle)
388 report("No bundle header", &*MBBI);
389 visitMachineInstrBefore(&*MBBI);
390 for (unsigned I = 0, E = MBBI->getNumOperands(); I != E; ++I) {
391 const MachineInstr &MI = *MBBI;
392 const MachineOperand &Op = MI.getOperand(I);
393 if (Op.getParent() != &MI) {
394 // Make sure to use correct addOperand / RemoveOperand / ChangeTo
395 // functions when replacing operands of a MachineInstr.
396 report("Instruction has operand with wrong parent set", &MI);
397 }
398
399 visitMachineOperand(&Op, I);
400 }
401
402 visitMachineInstrAfter(&*MBBI);
403
404 // Was this the last bundled instruction?
405 InBundle = MBBI->isBundledWithSucc();
406 }
407 if (CurBundle)
408 visitMachineBundleAfter(CurBundle);
409 if (InBundle)
410 report("BundledSucc flag set on last instruction in block", &MFI->back());
411 visitMachineBasicBlockAfter(&*MFI);
412 }
413 visitMachineFunctionAfter();
414
415 // Clean up.
416 regsLive.clear();
417 regsDefined.clear();
418 regsDead.clear();
419 regsKilled.clear();
420 regMasks.clear();
421 regsLiveInButUnused.clear();
422 MBBInfoMap.clear();
423
424 return foundErrors;
425}
426
427void MachineVerifier::report(const char *msg, const MachineFunction *MF) {
428 assert(MF)((MF) ? static_cast<void> (0) : __assert_fail ("MF", "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 428, __PRETTY_FUNCTION__))
;
429 errs() << '\n';
430 if (!foundErrors++) {
431 if (Banner)
432 errs() << "# " << Banner << '\n';
433 if (LiveInts != nullptr)
434 LiveInts->print(errs());
435 else
436 MF->print(errs(), Indexes);
437 }
438 errs() << "*** Bad machine code: " << msg << " ***\n"
439 << "- function: " << MF->getName() << "\n";
440}
441
442void MachineVerifier::report(const char *msg, const MachineBasicBlock *MBB) {
443 assert(MBB)((MBB) ? static_cast<void> (0) : __assert_fail ("MBB", "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 443, __PRETTY_FUNCTION__))
;
444 report(msg, MBB->getParent());
445 errs() << "- basic block: BB#" << MBB->getNumber()
446 << ' ' << MBB->getName()
447 << " (" << (const void*)MBB << ')';
448 if (Indexes)
449 errs() << " [" << Indexes->getMBBStartIdx(MBB)
450 << ';' << Indexes->getMBBEndIdx(MBB) << ')';
451 errs() << '\n';
452}
453
454void MachineVerifier::report(const char *msg, const MachineInstr *MI) {
455 assert(MI)((MI) ? static_cast<void> (0) : __assert_fail ("MI", "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 455, __PRETTY_FUNCTION__))
;
456 report(msg, MI->getParent());
457 errs() << "- instruction: ";
458 if (Indexes && Indexes->hasIndex(*MI))
459 errs() << Indexes->getInstructionIndex(*MI) << '\t';
460 MI->print(errs(), /*SkipOpers=*/true);
461 errs() << '\n';
462}
463
464void MachineVerifier::report(const char *msg,
465 const MachineOperand *MO, unsigned MONum) {
466 assert(MO)((MO) ? static_cast<void> (0) : __assert_fail ("MO", "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 466, __PRETTY_FUNCTION__))
;
467 report(msg, MO->getParent());
468 errs() << "- operand " << MONum << ": ";
469 MO->print(errs(), TRI);
470 errs() << "\n";
471}
472
473void MachineVerifier::report_context(SlotIndex Pos) const {
474 errs() << "- at: " << Pos << '\n';
475}
476
477void MachineVerifier::report_context(const LiveInterval &LI) const {
478 errs() << "- interval: " << LI << '\n';
479}
480
481void MachineVerifier::report_context(const LiveRange &LR, unsigned VRegUnit,
482 LaneBitmask LaneMask) const {
483 report_context_liverange(LR);
484 report_context_vreg_regunit(VRegUnit);
485 if (LaneMask.any())
486 report_context_lanemask(LaneMask);
487}
488
489void MachineVerifier::report_context(const LiveRange::Segment &S) const {
490 errs() << "- segment: " << S << '\n';
491}
492
493void MachineVerifier::report_context(const VNInfo &VNI) const {
494 errs() << "- ValNo: " << VNI.id << " (def " << VNI.def << ")\n";
495}
496
497void MachineVerifier::report_context_liverange(const LiveRange &LR) const {
498 errs() << "- liverange: " << LR << '\n';
499}
500
501void MachineVerifier::report_context_vreg(unsigned VReg) const {
502 errs() << "- v. register: " << PrintReg(VReg, TRI) << '\n';
503}
504
505void MachineVerifier::report_context_vreg_regunit(unsigned VRegOrUnit) const {
506 if (TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
507 report_context_vreg(VRegOrUnit);
508 } else {
509 errs() << "- regunit: " << PrintRegUnit(VRegOrUnit, TRI) << '\n';
510 }
511}
512
513void MachineVerifier::report_context_lanemask(LaneBitmask LaneMask) const {
514 errs() << "- lanemask: " << PrintLaneMask(LaneMask) << '\n';
515}
516
517void MachineVerifier::markReachable(const MachineBasicBlock *MBB) {
518 BBInfo &MInfo = MBBInfoMap[MBB];
519 if (!MInfo.reachable) {
520 MInfo.reachable = true;
521 for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
522 SuE = MBB->succ_end(); SuI != SuE; ++SuI)
523 markReachable(*SuI);
524 }
525}
526
527void MachineVerifier::visitMachineFunctionBefore() {
528 lastIndex = SlotIndex();
529 regsReserved = MRI->getReservedRegs();
530
531 markReachable(&MF->front());
532
533 // Build a set of the basic blocks in the function.
534 FunctionBlocks.clear();
535 for (const auto &MBB : *MF) {
536 FunctionBlocks.insert(&MBB);
537 BBInfo &MInfo = MBBInfoMap[&MBB];
538
539 MInfo.Preds.insert(MBB.pred_begin(), MBB.pred_end());
540 if (MInfo.Preds.size() != MBB.pred_size())
541 report("MBB has duplicate entries in its predecessor list.", &MBB);
542
543 MInfo.Succs.insert(MBB.succ_begin(), MBB.succ_end());
544 if (MInfo.Succs.size() != MBB.succ_size())
545 report("MBB has duplicate entries in its successor list.", &MBB);
546 }
547
548 // Check that the register use lists are sane.
549 MRI->verifyUseLists();
550
551 verifyStackFrame();
552}
553
554// Does iterator point to a and b as the first two elements?
555static bool matchPair(MachineBasicBlock::const_succ_iterator i,
556 const MachineBasicBlock *a, const MachineBasicBlock *b) {
557 if (*i == a)
558 return *++i == b;
559 if (*i == b)
560 return *++i == a;
561 return false;
562}
563
564void
565MachineVerifier::visitMachineBasicBlockBefore(const MachineBasicBlock *MBB) {
566 FirstTerminator = nullptr;
567
568 if (!MF->getProperties().hasProperty(
569 MachineFunctionProperties::Property::NoPHIs) && MRI->tracksLiveness()) {
570 // If this block has allocatable physical registers live-in, check that
571 // it is an entry block or landing pad.
572 for (const auto &LI : MBB->liveins()) {
573 if (isAllocatable(LI.PhysReg) && !MBB->isEHPad() &&
574 MBB->getIterator() != MBB->getParent()->begin()) {
575 report("MBB has allocatable live-in, but isn't entry or landing-pad.", MBB);
576 }
577 }
578 }
579
580 // Count the number of landing pad successors.
581 SmallPtrSet<MachineBasicBlock*, 4> LandingPadSuccs;
582 for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
583 E = MBB->succ_end(); I != E; ++I) {
584 if ((*I)->isEHPad())
585 LandingPadSuccs.insert(*I);
586 if (!FunctionBlocks.count(*I))
587 report("MBB has successor that isn't part of the function.", MBB);
588 if (!MBBInfoMap[*I].Preds.count(MBB)) {
589 report("Inconsistent CFG", MBB);
590 errs() << "MBB is not in the predecessor list of the successor BB#"
591 << (*I)->getNumber() << ".\n";
592 }
593 }
594
595 // Check the predecessor list.
596 for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
597 E = MBB->pred_end(); I != E; ++I) {
598 if (!FunctionBlocks.count(*I))
599 report("MBB has predecessor that isn't part of the function.", MBB);
600 if (!MBBInfoMap[*I].Succs.count(MBB)) {
601 report("Inconsistent CFG", MBB);
602 errs() << "MBB is not in the successor list of the predecessor BB#"
603 << (*I)->getNumber() << ".\n";
604 }
605 }
606
607 const MCAsmInfo *AsmInfo = TM->getMCAsmInfo();
608 const BasicBlock *BB = MBB->getBasicBlock();
609 const Function *Fn = MF->getFunction();
610 if (LandingPadSuccs.size() > 1 &&
611 !(AsmInfo &&
612 AsmInfo->getExceptionHandlingType() == ExceptionHandling::SjLj &&
613 BB && isa<SwitchInst>(BB->getTerminator())) &&
614 !isFuncletEHPersonality(classifyEHPersonality(Fn->getPersonalityFn())))
615 report("MBB has more than one landing pad successor", MBB);
616
617 // Call AnalyzeBranch. If it succeeds, there several more conditions to check.
618 MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
619 SmallVector<MachineOperand, 4> Cond;
620 if (!TII->analyzeBranch(*const_cast<MachineBasicBlock *>(MBB), TBB, FBB,
621 Cond)) {
622 // Ok, AnalyzeBranch thinks it knows what's going on with this block. Let's
623 // check whether its answers match up with reality.
624 if (!TBB && !FBB) {
625 // Block falls through to its successor.
626 MachineFunction::const_iterator MBBI = MBB->getIterator();
627 ++MBBI;
628 if (MBBI == MF->end()) {
629 // It's possible that the block legitimately ends with a noreturn
630 // call or an unreachable, in which case it won't actually fall
631 // out the bottom of the function.
632 } else if (MBB->succ_size() == LandingPadSuccs.size()) {
633 // It's possible that the block legitimately ends with a noreturn
634 // call or an unreachable, in which case it won't actuall fall
635 // out of the block.
636 } else if (MBB->succ_size() != 1+LandingPadSuccs.size()) {
637 report("MBB exits via unconditional fall-through but doesn't have "
638 "exactly one CFG successor!", MBB);
639 } else if (!MBB->isSuccessor(&*MBBI)) {
640 report("MBB exits via unconditional fall-through but its successor "
641 "differs from its CFG successor!", MBB);
642 }
643 if (!MBB->empty() && MBB->back().isBarrier() &&
644 !TII->isPredicated(MBB->back())) {
645 report("MBB exits via unconditional fall-through but ends with a "
646 "barrier instruction!", MBB);
647 }
648 if (!Cond.empty()) {
649 report("MBB exits via unconditional fall-through but has a condition!",
650 MBB);
651 }
652 } else if (TBB && !FBB && Cond.empty()) {
653 // Block unconditionally branches somewhere.
654 // If the block has exactly one successor, that happens to be a
655 // landingpad, accept it as valid control flow.
656 if (MBB->succ_size() != 1+LandingPadSuccs.size() &&
657 (MBB->succ_size() != 1 || LandingPadSuccs.size() != 1 ||
658 *MBB->succ_begin() != *LandingPadSuccs.begin())) {
659 report("MBB exits via unconditional branch but doesn't have "
660 "exactly one CFG successor!", MBB);
661 } else if (!MBB->isSuccessor(TBB)) {
662 report("MBB exits via unconditional branch but the CFG "
663 "successor doesn't match the actual successor!", MBB);
664 }
665 if (MBB->empty()) {
666 report("MBB exits via unconditional branch but doesn't contain "
667 "any instructions!", MBB);
668 } else if (!MBB->back().isBarrier()) {
669 report("MBB exits via unconditional branch but doesn't end with a "
670 "barrier instruction!", MBB);
671 } else if (!MBB->back().isTerminator()) {
672 report("MBB exits via unconditional branch but the branch isn't a "
673 "terminator instruction!", MBB);
674 }
675 } else if (TBB && !FBB && !Cond.empty()) {
676 // Block conditionally branches somewhere, otherwise falls through.
677 MachineFunction::const_iterator MBBI = MBB->getIterator();
678 ++MBBI;
679 if (MBBI == MF->end()) {
680 report("MBB conditionally falls through out of function!", MBB);
681 } else if (MBB->succ_size() == 1) {
682 // A conditional branch with only one successor is weird, but allowed.
683 if (&*MBBI != TBB)
684 report("MBB exits via conditional branch/fall-through but only has "
685 "one CFG successor!", MBB);
686 else if (TBB != *MBB->succ_begin())
687 report("MBB exits via conditional branch/fall-through but the CFG "
688 "successor don't match the actual successor!", MBB);
689 } else if (MBB->succ_size() != 2) {
690 report("MBB exits via conditional branch/fall-through but doesn't have "
691 "exactly two CFG successors!", MBB);
692 } else if (!matchPair(MBB->succ_begin(), TBB, &*MBBI)) {
693 report("MBB exits via conditional branch/fall-through but the CFG "
694 "successors don't match the actual successors!", MBB);
695 }
696 if (MBB->empty()) {
697 report("MBB exits via conditional branch/fall-through but doesn't "
698 "contain any instructions!", MBB);
699 } else if (MBB->back().isBarrier()) {
700 report("MBB exits via conditional branch/fall-through but ends with a "
701 "barrier instruction!", MBB);
702 } else if (!MBB->back().isTerminator()) {
703 report("MBB exits via conditional branch/fall-through but the branch "
704 "isn't a terminator instruction!", MBB);
705 }
706 } else if (TBB && FBB) {
707 // Block conditionally branches somewhere, otherwise branches
708 // somewhere else.
709 if (MBB->succ_size() == 1) {
710 // A conditional branch with only one successor is weird, but allowed.
711 if (FBB != TBB)
712 report("MBB exits via conditional branch/branch through but only has "
713 "one CFG successor!", MBB);
714 else if (TBB != *MBB->succ_begin())
715 report("MBB exits via conditional branch/branch through but the CFG "
716 "successor don't match the actual successor!", MBB);
717 } else if (MBB->succ_size() != 2) {
718 report("MBB exits via conditional branch/branch but doesn't have "
719 "exactly two CFG successors!", MBB);
720 } else if (!matchPair(MBB->succ_begin(), TBB, FBB)) {
721 report("MBB exits via conditional branch/branch but the CFG "
722 "successors don't match the actual successors!", MBB);
723 }
724 if (MBB->empty()) {
725 report("MBB exits via conditional branch/branch but doesn't "
726 "contain any instructions!", MBB);
727 } else if (!MBB->back().isBarrier()) {
728 report("MBB exits via conditional branch/branch but doesn't end with a "
729 "barrier instruction!", MBB);
730 } else if (!MBB->back().isTerminator()) {
731 report("MBB exits via conditional branch/branch but the branch "
732 "isn't a terminator instruction!", MBB);
733 }
734 if (Cond.empty()) {
735 report("MBB exits via conditinal branch/branch but there's no "
736 "condition!", MBB);
737 }
738 } else {
739 report("AnalyzeBranch returned invalid data!", MBB);
740 }
741 }
742
743 regsLive.clear();
744 if (MRI->tracksLiveness()) {
745 for (const auto &LI : MBB->liveins()) {
746 if (!TargetRegisterInfo::isPhysicalRegister(LI.PhysReg)) {
747 report("MBB live-in list contains non-physical register", MBB);
748 continue;
749 }
750 for (MCSubRegIterator SubRegs(LI.PhysReg, TRI, /*IncludeSelf=*/true);
751 SubRegs.isValid(); ++SubRegs)
752 regsLive.insert(*SubRegs);
753 }
754 }
755 regsLiveInButUnused = regsLive;
756
757 const MachineFrameInfo &MFI = MF->getFrameInfo();
758 BitVector PR = MFI.getPristineRegs(*MF);
759 for (int I = PR.find_first(); I>0; I = PR.find_next(I)) {
760 for (MCSubRegIterator SubRegs(I, TRI, /*IncludeSelf=*/true);
761 SubRegs.isValid(); ++SubRegs)
762 regsLive.insert(*SubRegs);
763 }
764
765 regsKilled.clear();
766 regsDefined.clear();
767
768 if (Indexes)
769 lastIndex = Indexes->getMBBStartIdx(MBB);
770}
771
772// This function gets called for all bundle headers, including normal
773// stand-alone unbundled instructions.
774void MachineVerifier::visitMachineBundleBefore(const MachineInstr *MI) {
775 if (Indexes && Indexes->hasIndex(*MI)) {
776 SlotIndex idx = Indexes->getInstructionIndex(*MI);
777 if (!(idx > lastIndex)) {
778 report("Instruction index out of order", MI);
779 errs() << "Last instruction was at " << lastIndex << '\n';
780 }
781 lastIndex = idx;
782 }
783
784 // Ensure non-terminators don't follow terminators.
785 // Ignore predicated terminators formed by if conversion.
786 // FIXME: If conversion shouldn't need to violate this rule.
787 if (MI->isTerminator() && !TII->isPredicated(*MI)) {
788 if (!FirstTerminator)
789 FirstTerminator = MI;
790 } else if (FirstTerminator) {
791 report("Non-terminator instruction after the first terminator", MI);
792 errs() << "First terminator was:\t" << *FirstTerminator;
793 }
794}
795
796// The operands on an INLINEASM instruction must follow a template.
797// Verify that the flag operands make sense.
798void MachineVerifier::verifyInlineAsm(const MachineInstr *MI) {
799 // The first two operands on INLINEASM are the asm string and global flags.
800 if (MI->getNumOperands() < 2) {
801 report("Too few operands on inline asm", MI);
802 return;
803 }
804 if (!MI->getOperand(0).isSymbol())
805 report("Asm string must be an external symbol", MI);
806 if (!MI->getOperand(1).isImm())
807 report("Asm flags must be an immediate", MI);
808 // Allowed flags are Extra_HasSideEffects = 1, Extra_IsAlignStack = 2,
809 // Extra_AsmDialect = 4, Extra_MayLoad = 8, and Extra_MayStore = 16,
810 // and Extra_IsConvergent = 32.
811 if (!isUInt<6>(MI->getOperand(1).getImm()))
812 report("Unknown asm flags", &MI->getOperand(1), 1);
813
814 static_assert(InlineAsm::MIOp_FirstOperand == 2, "Asm format changed");
815
816 unsigned OpNo = InlineAsm::MIOp_FirstOperand;
817 unsigned NumOps;
818 for (unsigned e = MI->getNumOperands(); OpNo < e; OpNo += NumOps) {
819 const MachineOperand &MO = MI->getOperand(OpNo);
820 // There may be implicit ops after the fixed operands.
821 if (!MO.isImm())
822 break;
823 NumOps = 1 + InlineAsm::getNumOperandRegisters(MO.getImm());
824 }
825
826 if (OpNo > MI->getNumOperands())
827 report("Missing operands in last group", MI);
828
829 // An optional MDNode follows the groups.
830 if (OpNo < MI->getNumOperands() && MI->getOperand(OpNo).isMetadata())
831 ++OpNo;
832
833 // All trailing operands must be implicit registers.
834 for (unsigned e = MI->getNumOperands(); OpNo < e; ++OpNo) {
835 const MachineOperand &MO = MI->getOperand(OpNo);
836 if (!MO.isReg() || !MO.isImplicit())
837 report("Expected implicit register after groups", &MO, OpNo);
838 }
839}
840
// Verify per-instruction properties that do not depend on liveness: operand
// counts, inline-asm operand layout, memoperand flags, slot index mapping,
// and the extra constraints on pre-isel generic (GlobalISel) opcodes.
void MachineVerifier::visitMachineInstrBefore(const MachineInstr *MI) {
  const MCInstrDesc &MCID = MI->getDesc();
  // Every explicit operand described by the MCInstrDesc must be present.
  if (MI->getNumOperands() < MCID.getNumOperands()) {
    report("Too few operands", MI);
    errs() << MCID.getNumOperands() << " operands expected, but "
           << MI->getNumOperands() << " given.\n";
  }

  if (MI->isPHI() && MF->getProperties().hasProperty(
          MachineFunctionProperties::Property::NoPHIs))
    report("Found PHI instruction with NoPHIs property set", MI);

  // Check the tied operands.
  if (MI->isInlineAsm())
    verifyInlineAsm(MI);

  // Check the MachineMemOperands for basic consistency: an instruction with
  // a load/store memoperand must also carry the corresponding may* flag.
  for (MachineInstr::mmo_iterator I = MI->memoperands_begin(),
       E = MI->memoperands_end(); I != E; ++I) {
    if ((*I)->isLoad() && !MI->mayLoad())
      report("Missing mayLoad flag", MI);
    if ((*I)->isStore() && !MI->mayStore())
      report("Missing mayStore flag", MI);
  }

  // Debug values must not have a slot index.
  // Other instructions must have one, unless they are inside a bundle.
  if (LiveInts) {
    bool mapped = !LiveInts->isNotInMIMap(*MI);
    if (MI->isDebugValue()) {
      if (mapped)
        report("Debug instruction has a slot index", MI);
    } else if (MI->isInsideBundle()) {
      if (mapped)
        report("Instruction inside bundle has a slot index", MI);
    } else {
      if (!mapped)
        report("Missing slot index", MI);
    }
  }

  // Check types.
  if (isPreISelGenericOpcode(MCID.getOpcode())) {
    if (isFunctionSelected)
      report("Unexpected generic instruction in a Selected function", MI);

    // Generic instructions specify equality constraints between some
    // of their operands. Make sure these are consistent.
    SmallVector<LLT, 4> Types;
    for (unsigned i = 0; i < MCID.getNumOperands(); ++i) {
      if (!MCID.OpInfo[i].isGenericType())
        continue;
      size_t TypeIdx = MCID.OpInfo[i].getGenericTypeIndex();
      Types.resize(std::max(TypeIdx + 1, Types.size()));

      // All operands sharing a type index must have the same LLT.
      LLT OpTy = MRI->getType(MI->getOperand(i).getReg());
      if (Types[TypeIdx].isValid() && Types[TypeIdx] != OpTy)
        report("type mismatch in generic instruction", MI);
      Types[TypeIdx] = OpTy;
    }
  }

  // Generic opcodes must not have physical register operands.
  if (isPreISelGenericOpcode(MCID.getOpcode())) {
    for (auto &Op : MI->operands()) {
      if (Op.isReg() && TargetRegisterInfo::isPhysicalRegister(Op.getReg()))
        report("Generic instruction cannot have physical register", MI);
    }
  }

  // Generic loads and stores must have a single MachineMemOperand
  // describing that access.
  if ((MI->getOpcode() == TargetOpcode::G_LOAD ||
       MI->getOpcode() == TargetOpcode::G_STORE) &&
      !MI->hasOneMemOperand())
    report("Generic instruction accessing memory must have one mem operand",
           MI);

  // Finally give the target a chance to run its own instruction checks.
  StringRef ErrorInfo;
  if (!TII->verifyInstruction(*MI, ErrorInfo))
    report(ErrorInfo.data(), MI);
}
923
924void
925MachineVerifier::visitMachineOperand(const MachineOperand *MO, unsigned MONum) {
926 const MachineInstr *MI = MO->getParent();
927 const MCInstrDesc &MCID = MI->getDesc();
928 unsigned NumDefs = MCID.getNumDefs();
929 if (MCID.getOpcode() == TargetOpcode::PATCHPOINT)
930 NumDefs = (MONum == 0 && MO->isReg()) ? NumDefs : 0;
931
932 // The first MCID.NumDefs operands must be explicit register defines
933 if (MONum < NumDefs) {
934 const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
935 if (!MO->isReg())
936 report("Explicit definition must be a register", MO, MONum);
937 else if (!MO->isDef() && !MCOI.isOptionalDef())
938 report("Explicit definition marked as use", MO, MONum);
939 else if (MO->isImplicit())
940 report("Explicit definition marked as implicit", MO, MONum);
941 } else if (MONum < MCID.getNumOperands()) {
942 const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
943 // Don't check if it's the last operand in a variadic instruction. See,
944 // e.g., LDM_RET in the arm back end.
945 if (MO->isReg() &&
946 !(MI->isVariadic() && MONum == MCID.getNumOperands()-1)) {
947 if (MO->isDef() && !MCOI.isOptionalDef())
948 report("Explicit operand marked as def", MO, MONum);
949 if (MO->isImplicit())
950 report("Explicit operand marked as implicit", MO, MONum);
951 }
952
953 int TiedTo = MCID.getOperandConstraint(MONum, MCOI::TIED_TO);
954 if (TiedTo != -1) {
955 if (!MO->isReg())
956 report("Tied use must be a register", MO, MONum);
957 else if (!MO->isTied())
958 report("Operand should be tied", MO, MONum);
959 else if (unsigned(TiedTo) != MI->findTiedOperandIdx(MONum))
960 report("Tied def doesn't match MCInstrDesc", MO, MONum);
961 } else if (MO->isReg() && MO->isTied())
962 report("Explicit operand should not be tied", MO, MONum);
963 } else {
964 // ARM adds %reg0 operands to indicate predicates. We'll allow that.
965 if (MO->isReg() && !MO->isImplicit() && !MI->isVariadic() && MO->getReg())
966 report("Extra explicit operand on non-variadic instruction", MO, MONum);
967 }
968
969 switch (MO->getType()) {
970 case MachineOperand::MO_Register: {
971 const unsigned Reg = MO->getReg();
972 if (!Reg)
973 return;
974 if (MRI->tracksLiveness() && !MI->isDebugValue())
975 checkLiveness(MO, MONum);
976
977 // Verify the consistency of tied operands.
978 if (MO->isTied()) {
979 unsigned OtherIdx = MI->findTiedOperandIdx(MONum);
980 const MachineOperand &OtherMO = MI->getOperand(OtherIdx);
981 if (!OtherMO.isReg())
982 report("Must be tied to a register", MO, MONum);
983 if (!OtherMO.isTied())
984 report("Missing tie flags on tied operand", MO, MONum);
985 if (MI->findTiedOperandIdx(OtherIdx) != MONum)
986 report("Inconsistent tie links", MO, MONum);
987 if (MONum < MCID.getNumDefs()) {
988 if (OtherIdx < MCID.getNumOperands()) {
989 if (-1 == MCID.getOperandConstraint(OtherIdx, MCOI::TIED_TO))
990 report("Explicit def tied to explicit use without tie constraint",
991 MO, MONum);
992 } else {
993 if (!OtherMO.isImplicit())
994 report("Explicit def should be tied to implicit use", MO, MONum);
995 }
996 }
997 }
998
999 // Verify two-address constraints after leaving SSA form.
1000 unsigned DefIdx;
1001 if (!MRI->isSSA() && MO->isUse() &&
1002 MI->isRegTiedToDefOperand(MONum, &DefIdx) &&
1003 Reg != MI->getOperand(DefIdx).getReg())
1004 report("Two-address instruction operands must be identical", MO, MONum);
1005
1006 // Check register classes.
1007 if (MONum < MCID.getNumOperands() && !MO->isImplicit()) {
1008 unsigned SubIdx = MO->getSubReg();
1009
1010 if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
1011 if (SubIdx) {
1012 report("Illegal subregister index for physical register", MO, MONum);
1013 return;
1014 }
1015 if (const TargetRegisterClass *DRC =
1016 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1017 if (!DRC->contains(Reg)) {
1018 report("Illegal physical register for instruction", MO, MONum);
1019 errs() << TRI->getName(Reg) << " is not a "
1020 << TRI->getRegClassName(DRC) << " register.\n";
1021 }
1022 }
1023 } else {
1024 // Virtual register.
1025 const TargetRegisterClass *RC = MRI->getRegClassOrNull(Reg);
1026 if (!RC) {
1027 // This is a generic virtual register.
1028
1029 // If we're post-Select, we can't have gvregs anymore.
1030 if (isFunctionSelected) {
1031 report("Generic virtual register invalid in a Selected function",
1032 MO, MONum);
1033 return;
1034 }
1035
1036 // The gvreg must have a type and it must not have a SubIdx.
1037 LLT Ty = MRI->getType(Reg);
1038 if (!Ty.isValid()) {
1039 report("Generic virtual register must have a valid type", MO,
1040 MONum);
1041 return;
1042 }
1043
1044 const RegisterBank *RegBank = MRI->getRegBankOrNull(Reg);
1045
1046 // If we're post-RegBankSelect, the gvreg must have a bank.
1047 if (!RegBank && isFunctionRegBankSelected) {
1048 report("Generic virtual register must have a bank in a "
1049 "RegBankSelected function",
1050 MO, MONum);
1051 return;
1052 }
1053
1054 // Make sure the register fits into its register bank if any.
1055 if (RegBank && Ty.isValid() &&
1056 RegBank->getSize() < Ty.getSizeInBits()) {
1057 report("Register bank is too small for virtual register", MO,
1058 MONum);
1059 errs() << "Register bank " << RegBank->getName() << " too small("
1060 << RegBank->getSize() << ") to fit " << Ty.getSizeInBits()
1061 << "-bits\n";
1062 return;
1063 }
1064 if (SubIdx) {
1065 report("Generic virtual register does not subregister index", MO,
1066 MONum);
1067 return;
1068 }
1069
1070 // If this is a target specific instruction and this operand
1071 // has register class constraint, the virtual register must
1072 // comply to it.
1073 if (!isPreISelGenericOpcode(MCID.getOpcode()) &&
1074 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1075 report("Virtual register does not match instruction constraint", MO,
1076 MONum);
1077 errs() << "Expect register class "
1078 << TRI->getRegClassName(
1079 TII->getRegClass(MCID, MONum, TRI, *MF))
1080 << " but got nothing\n";
1081 return;
1082 }
1083
1084 break;
1085 }
1086 if (SubIdx) {
1087 const TargetRegisterClass *SRC =
1088 TRI->getSubClassWithSubReg(RC, SubIdx);
1089 if (!SRC) {
1090 report("Invalid subregister index for virtual register", MO, MONum);
1091 errs() << "Register class " << TRI->getRegClassName(RC)
1092 << " does not support subreg index " << SubIdx << "\n";
1093 return;
1094 }
1095 if (RC != SRC) {
1096 report("Invalid register class for subregister index", MO, MONum);
1097 errs() << "Register class " << TRI->getRegClassName(RC)
1098 << " does not fully support subreg index " << SubIdx << "\n";
1099 return;
1100 }
1101 }
1102 if (const TargetRegisterClass *DRC =
1103 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1104 if (SubIdx) {
1105 const TargetRegisterClass *SuperRC =
1106 TRI->getLargestLegalSuperClass(RC, *MF);
1107 if (!SuperRC) {
1108 report("No largest legal super class exists.", MO, MONum);
1109 return;
1110 }
1111 DRC = TRI->getMatchingSuperRegClass(SuperRC, DRC, SubIdx);
1112 if (!DRC) {
1113 report("No matching super-reg register class.", MO, MONum);
1114 return;
1115 }
1116 }
1117 if (!RC->hasSuperClassEq(DRC)) {
1118 report("Illegal virtual register for instruction", MO, MONum);
1119 errs() << "Expected a " << TRI->getRegClassName(DRC)
1120 << " register, but got a " << TRI->getRegClassName(RC)
1121 << " register\n";
1122 }
1123 }
1124 }
1125 }
1126 break;
1127 }
1128
1129 case MachineOperand::MO_RegisterMask:
1130 regMasks.push_back(MO->getRegMask());
1131 break;
1132
1133 case MachineOperand::MO_MachineBasicBlock:
1134 if (MI->isPHI() && !MO->getMBB()->isSuccessor(MI->getParent()))
1135 report("PHI operand is not in the CFG", MO, MONum);
1136 break;
1137
1138 case MachineOperand::MO_FrameIndex:
1139 if (LiveStks && LiveStks->hasInterval(MO->getIndex()) &&
1140 LiveInts && !LiveInts->isNotInMIMap(*MI)) {
1141 int FI = MO->getIndex();
1142 LiveInterval &LI = LiveStks->getInterval(FI);
1143 SlotIndex Idx = LiveInts->getInstructionIndex(*MI);
1144
1145 bool stores = MI->mayStore();
1146 bool loads = MI->mayLoad();
1147 // For a memory-to-memory move, we need to check if the frame
1148 // index is used for storing or loading, by inspecting the
1149 // memory operands.
1150 if (stores && loads) {
1151 for (auto *MMO : MI->memoperands()) {
1152 const PseudoSourceValue *PSV = MMO->getPseudoValue();
1153 if (PSV == nullptr) continue;
1154 const FixedStackPseudoSourceValue *Value =
1155 dyn_cast<FixedStackPseudoSourceValue>(PSV);
1156 if (Value == nullptr) continue;
1157 if (Value->getFrameIndex() != FI) continue;
1158
1159 if (MMO->isStore())
1160 loads = false;
1161 else
1162 stores = false;
1163 break;
1164 }
1165 if (loads == stores)
1166 report("Missing fixed stack memoperand.", MI);
1167 }
1168 if (loads && !LI.liveAt(Idx.getRegSlot(true))) {
1169 report("Instruction loads from dead spill slot", MO, MONum);
1170 errs() << "Live stack: " << LI << '\n';
1171 }
1172 if (stores && !LI.liveAt(Idx.getRegSlot())) {
1173 report("Instruction stores to dead spill slot", MO, MONum);
1174 errs() << "Live stack: " << LI << '\n';
1175 }
1176 }
1177 break;
1178
1179 default:
1180 break;
1181 }
1182}
1183
1184void MachineVerifier::checkLivenessAtUse(const MachineOperand *MO,
1185 unsigned MONum, SlotIndex UseIdx, const LiveRange &LR, unsigned VRegOrUnit,
1186 LaneBitmask LaneMask) {
1187 LiveQueryResult LRQ = LR.Query(UseIdx);
1188 // Check if we have a segment at the use, note however that we only need one
1189 // live subregister range, the others may be dead.
1190 if (!LRQ.valueIn() && LaneMask.none()) {
1191 report("No live segment at use", MO, MONum);
1192 report_context_liverange(LR);
1193 report_context_vreg_regunit(VRegOrUnit);
1194 report_context(UseIdx);
1195 }
1196 if (MO->isKill() && !LRQ.isKill()) {
1197 report("Live range continues after kill flag", MO, MONum);
1198 report_context_liverange(LR);
1199 report_context_vreg_regunit(VRegOrUnit);
1200 if (LaneMask.any())
1201 report_context_lanemask(LaneMask);
1202 report_context(UseIdx);
1203 }
1204}
1205
1206void MachineVerifier::checkLivenessAtDef(const MachineOperand *MO,
1207 unsigned MONum, SlotIndex DefIdx, const LiveRange &LR, unsigned VRegOrUnit,
1208 LaneBitmask LaneMask) {
1209 if (const VNInfo *VNI = LR.getVNInfoAt(DefIdx)) {
1210 assert(VNI && "NULL valno is not allowed")((VNI && "NULL valno is not allowed") ? static_cast<
void> (0) : __assert_fail ("VNI && \"NULL valno is not allowed\""
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 1210, __PRETTY_FUNCTION__))
;
1211 if (VNI->def != DefIdx) {
1212 report("Inconsistent valno->def", MO, MONum);
1213 report_context_liverange(LR);
1214 report_context_vreg_regunit(VRegOrUnit);
1215 if (LaneMask.any())
1216 report_context_lanemask(LaneMask);
1217 report_context(*VNI);
1218 report_context(DefIdx);
1219 }
1220 } else {
1221 report("No live segment at def", MO, MONum);
1222 report_context_liverange(LR);
1223 report_context_vreg_regunit(VRegOrUnit);
1224 if (LaneMask.any())
1225 report_context_lanemask(LaneMask);
1226 report_context(DefIdx);
1227 }
1228 // Check that, if the dead def flag is present, LiveInts agree.
1229 if (MO->isDead()) {
1230 LiveQueryResult LRQ = LR.Query(DefIdx);
1231 if (!LRQ.isDeadDef()) {
1232 // In case of physregs we can have a non-dead definition on another
1233 // operand.
1234 bool otherDef = false;
1235 if (!TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
1236 const MachineInstr &MI = *MO->getParent();
1237 for (const MachineOperand &MO : MI.operands()) {
1238 if (!MO.isReg() || !MO.isDef() || MO.isDead())
1239 continue;
1240 unsigned Reg = MO.getReg();
1241 for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
1242 if (*Units == VRegOrUnit) {
1243 otherDef = true;
1244 break;
1245 }
1246 }
1247 }
1248 }
1249
1250 if (!otherDef) {
1251 report("Live range continues after dead def flag", MO, MONum);
1252 report_context_liverange(LR);
1253 report_context_vreg_regunit(VRegOrUnit);
1254 if (LaneMask.any())
1255 report_context_lanemask(LaneMask);
1256 }
1257 }
1258 }
1259}
1260
// Check all liveness-related properties of a register operand: kill flags
// against LiveVariables, live segments against LiveIntervals, and the
// locally tracked regsLive set. Called from visitMachineOperand for every
// register operand while the function still tracks liveness.
void MachineVerifier::checkLiveness(const MachineOperand *MO, unsigned MONum) {
  const MachineInstr *MI = MO->getParent();
  const unsigned Reg = MO->getReg();

  // Both use and def operands can read a register.
  if (MO->readsReg()) {
    regsLiveInButUnused.erase(Reg);

    if (MO->isKill())
      addRegWithSubRegs(regsKilled, Reg);

    // Check that LiveVars knows this kill.
    if (LiveVars && TargetRegisterInfo::isVirtualRegister(Reg) &&
        MO->isKill()) {
      LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
      if (!is_contained(VI.Kills, MI))
        report("Kill missing from LiveVariables", MO, MONum);
    }

    // Check LiveInts liveness and kill.
    if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
      SlotIndex UseIdx = LiveInts->getInstructionIndex(*MI);
      // Check the cached regunit intervals.
      if (TargetRegisterInfo::isPhysicalRegister(Reg) && !isReserved(Reg)) {
        for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
          if (const LiveRange *LR = LiveInts->getCachedRegUnit(*Units))
            checkLivenessAtUse(MO, MONum, UseIdx, *LR, *Units);
        }
      }

      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        if (LiveInts->hasInterval(Reg)) {
          // This is a virtual register interval.
          const LiveInterval &LI = LiveInts->getInterval(Reg);
          checkLivenessAtUse(MO, MONum, UseIdx, LI, Reg);

          if (LI.hasSubRanges() && !MO->isDef()) {
            // Compute the lane mask this operand reads and check every
            // overlapping subrange individually.
            unsigned SubRegIdx = MO->getSubReg();
            LaneBitmask MOMask = SubRegIdx != 0
                               ? TRI->getSubRegIndexLaneMask(SubRegIdx)
                               : MRI->getMaxLaneMaskForVReg(Reg);
            LaneBitmask LiveInMask;
            for (const LiveInterval::SubRange &SR : LI.subranges()) {
              if ((MOMask & SR.LaneMask).none())
                continue;
              checkLivenessAtUse(MO, MONum, UseIdx, SR, Reg, SR.LaneMask);
              LiveQueryResult LRQ = SR.Query(UseIdx);
              if (LRQ.valueIn())
                LiveInMask |= SR.LaneMask;
            }
            // At least parts of the register has to be live at the use.
            if ((LiveInMask & MOMask).none()) {
              report("No live subrange at use", MO, MONum);
              report_context(LI);
              report_context(UseIdx);
            }
          }
        } else {
          report("Virtual register has no live interval", MO, MONum);
        }
      }
    }

    // Use of a dead register.
    if (!regsLive.count(Reg)) {
      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        // Reserved registers may be used even when 'dead'.
        bool Bad = !isReserved(Reg);
        // We are fine if just any subregister has a defined value.
        if (Bad) {
          for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid();
               ++SubRegs) {
            if (regsLive.count(*SubRegs)) {
              Bad = false;
              break;
            }
          }
        }
        // If there is an additional implicit-use of a super register we stop
        // here. By definition we are fine if the super register is not
        // (completely) dead, if the complete super register is dead we will
        // get a report for its operand.
        if (Bad) {
          for (const MachineOperand &MOP : MI->uses()) {
            if (!MOP.isReg())
              continue;
            if (!MOP.isImplicit())
              continue;
            for (MCSubRegIterator SubRegs(MOP.getReg(), TRI); SubRegs.isValid();
                 ++SubRegs) {
              if (*SubRegs == Reg) {
                Bad = false;
                break;
              }
            }
          }
        }
        if (Bad)
          report("Using an undefined physical register", MO, MONum);
      } else if (MRI->def_empty(Reg)) {
        report("Reading virtual register without a def", MO, MONum);
      } else {
        BBInfo &MInfo = MBBInfoMap[MI->getParent()];
        // We don't know which virtual registers are live in, so only complain
        // if vreg was killed in this MBB. Otherwise keep track of vregs that
        // must be live in. PHI instructions are handled separately.
        if (MInfo.regsKilled.count(Reg))
          report("Using a killed virtual register", MO, MONum);
        else if (!MI->isPHI())
          MInfo.vregsLiveIn.insert(std::make_pair(Reg, MI));
      }
    }
  }

  if (MO->isDef()) {
    // Register defined.
    // TODO: verify that earlyclobber ops are not used.
    if (MO->isDead())
      addRegWithSubRegs(regsDead, Reg);
    else
      addRegWithSubRegs(regsDefined, Reg);

    // Verify SSA form.
    if (MRI->isSSA() && TargetRegisterInfo::isVirtualRegister(Reg) &&
        std::next(MRI->def_begin(Reg)) != MRI->def_end())
      report("Multiple virtual register defs in SSA form", MO, MONum);

    // Check LiveInts for a live segment, but only for virtual registers.
    if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
      SlotIndex DefIdx = LiveInts->getInstructionIndex(*MI);
      DefIdx = DefIdx.getRegSlot(MO->isEarlyClobber());

      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        if (LiveInts->hasInterval(Reg)) {
          const LiveInterval &LI = LiveInts->getInterval(Reg);
          checkLivenessAtDef(MO, MONum, DefIdx, LI, Reg);

          if (LI.hasSubRanges()) {
            // Check every subrange covering lanes written by this operand.
            unsigned SubRegIdx = MO->getSubReg();
            LaneBitmask MOMask = SubRegIdx != 0
              ? TRI->getSubRegIndexLaneMask(SubRegIdx)
              : MRI->getMaxLaneMaskForVReg(Reg);
            for (const LiveInterval::SubRange &SR : LI.subranges()) {
              if ((SR.LaneMask & MOMask).none())
                continue;
              checkLivenessAtDef(MO, MONum, DefIdx, SR, Reg, SR.LaneMask);
            }
          }
        } else {
          report("Virtual register has no Live interval", MO, MONum);
        }
      }
    }
  }
}
1416
// Per-instruction post-visit hook. Intentionally empty: all post-visit
// bookkeeping happens at bundle granularity in visitMachineBundleAfter().
void MachineVerifier::visitMachineInstrAfter(const MachineInstr *MI) {
}
1419
1420// This function gets called after visiting all instructions in a bundle. The
1421// argument points to the bundle header.
1422// Normal stand-alone instructions are also considered 'bundles', and this
1423// function is called for all of them.
1424void MachineVerifier::visitMachineBundleAfter(const MachineInstr *MI) {
1425 BBInfo &MInfo = MBBInfoMap[MI->getParent()];
1426 set_union(MInfo.regsKilled, regsKilled);
1427 set_subtract(regsLive, regsKilled); regsKilled.clear();
1428 // Kill any masked registers.
1429 while (!regMasks.empty()) {
1430 const uint32_t *Mask = regMasks.pop_back_val();
1431 for (RegSet::iterator I = regsLive.begin(), E = regsLive.end(); I != E; ++I)
1432 if (TargetRegisterInfo::isPhysicalRegister(*I) &&
1433 MachineOperand::clobbersPhysReg(Mask, *I))
1434 regsDead.push_back(*I);
1435 }
1436 set_subtract(regsLive, regsDead); regsDead.clear();
1437 set_union(regsLive, regsDefined); regsDefined.clear();
1438}
1439
1440void
1441MachineVerifier::visitMachineBasicBlockAfter(const MachineBasicBlock *MBB) {
1442 MBBInfoMap[MBB].regsLiveOut = regsLive;
1443 regsLive.clear();
1444
1445 if (Indexes) {
1446 SlotIndex stop = Indexes->getMBBEndIdx(MBB);
1447 if (!(stop > lastIndex)) {
1448 report("Block ends before last instruction index", MBB);
1449 errs() << "Block ends at " << stop
1450 << " last instruction was at " << lastIndex << '\n';
1451 }
1452 lastIndex = stop;
1453 }
1454}
1455
1456// Calculate the largest possible vregsPassed sets. These are the registers that
1457// can pass through an MBB live, but may not be live every time. It is assumed
1458// that all vregsPassed sets are empty before the call.
1459void MachineVerifier::calcRegsPassed() {
1460 // First push live-out regs to successors' vregsPassed. Remember the MBBs that
1461 // have any vregsPassed.
1462 SmallPtrSet<const MachineBasicBlock*, 8> todo;
1463 for (const auto &MBB : *MF) {
1464 BBInfo &MInfo = MBBInfoMap[&MBB];
1465 if (!MInfo.reachable)
1466 continue;
1467 for (MachineBasicBlock::const_succ_iterator SuI = MBB.succ_begin(),
1468 SuE = MBB.succ_end(); SuI != SuE; ++SuI) {
1469 BBInfo &SInfo = MBBInfoMap[*SuI];
1470 if (SInfo.addPassed(MInfo.regsLiveOut))
1471 todo.insert(*SuI);
1472 }
1473 }
1474
1475 // Iteratively push vregsPassed to successors. This will converge to the same
1476 // final state regardless of DenseSet iteration order.
1477 while (!todo.empty()) {
1478 const MachineBasicBlock *MBB = *todo.begin();
1479 todo.erase(MBB);
1480 BBInfo &MInfo = MBBInfoMap[MBB];
1481 for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
1482 SuE = MBB->succ_end(); SuI != SuE; ++SuI) {
1483 if (*SuI == MBB)
1484 continue;
1485 BBInfo &SInfo = MBBInfoMap[*SuI];
1486 if (SInfo.addPassed(MInfo.vregsPassed))
1487 todo.insert(*SuI);
1488 }
1489 }
1490}
1491
1492// Calculate the set of virtual registers that must be passed through each basic
1493// block in order to satisfy the requirements of successor blocks. This is very
1494// similar to calcRegsPassed, only backwards.
1495void MachineVerifier::calcRegsRequired() {
1496 // First push live-in regs to predecessors' vregsRequired.
1497 SmallPtrSet<const MachineBasicBlock*, 8> todo;
1498 for (const auto &MBB : *MF) {
1499 BBInfo &MInfo = MBBInfoMap[&MBB];
1500 for (MachineBasicBlock::const_pred_iterator PrI = MBB.pred_begin(),
1501 PrE = MBB.pred_end(); PrI != PrE; ++PrI) {
1502 BBInfo &PInfo = MBBInfoMap[*PrI];
1503 if (PInfo.addRequired(MInfo.vregsLiveIn))
1504 todo.insert(*PrI);
1505 }
1506 }
1507
1508 // Iteratively push vregsRequired to predecessors. This will converge to the
1509 // same final state regardless of DenseSet iteration order.
1510 while (!todo.empty()) {
1511 const MachineBasicBlock *MBB = *todo.begin();
1512 todo.erase(MBB);
1513 BBInfo &MInfo = MBBInfoMap[MBB];
1514 for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
1515 PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
1516 if (*PrI == MBB)
1517 continue;
1518 BBInfo &SInfo = MBBInfoMap[*PrI];
1519 if (SInfo.addRequired(MInfo.vregsRequired))
1520 todo.insert(*PrI);
1521 }
1522 }
1523}
1524
1525// Check PHI instructions at the beginning of MBB. It is assumed that
1526// calcRegsPassed has been run so BBInfo::isLiveOut is valid.
1527void MachineVerifier::checkPHIOps(const MachineBasicBlock *MBB) {
1528 SmallPtrSet<const MachineBasicBlock*, 8> seen;
1529 for (const auto &BBI : *MBB) {
1530 if (!BBI.isPHI())
1531 break;
1532 seen.clear();
1533
1534 for (unsigned i = 1, e = BBI.getNumOperands(); i != e; i += 2) {
1535 unsigned Reg = BBI.getOperand(i).getReg();
1536 const MachineBasicBlock *Pre = BBI.getOperand(i + 1).getMBB();
1537 if (!Pre->isSuccessor(MBB))
1538 continue;
1539 seen.insert(Pre);
1540 BBInfo &PrInfo = MBBInfoMap[Pre];
1541 if (PrInfo.reachable && !PrInfo.isLiveOut(Reg))
1542 report("PHI operand is not live-out from predecessor",
1543 &BBI.getOperand(i), i);
1544 }
1545
1546 // Did we see all predecessors?
1547 for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
1548 PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
1549 if (!seen.count(*PrI)) {
1550 report("Missing PHI operand", &BBI);
1551 errs() << "BB#" << (*PrI)->getNumber()
1552 << " is a predecessor according to the CFG.\n";
1553 }
1554 }
1555 }
1556}
1557
1558void MachineVerifier::visitMachineFunctionAfter() {
1559 calcRegsPassed();
1560
1561 for (const auto &MBB : *MF) {
1562 BBInfo &MInfo = MBBInfoMap[&MBB];
1563
1564 // Skip unreachable MBBs.
1565 if (!MInfo.reachable)
1566 continue;
1567
1568 checkPHIOps(&MBB);
1569 }
1570
1571 // Now check liveness info if available
1572 calcRegsRequired();
1573
1574 // Check for killed virtual registers that should be live out.
1575 for (const auto &MBB : *MF) {
1576 BBInfo &MInfo = MBBInfoMap[&MBB];
1577 for (RegSet::iterator
1578 I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end(); I != E;
1579 ++I)
1580 if (MInfo.regsKilled.count(*I)) {
1581 report("Virtual register killed in block, but needed live out.", &MBB);
1582 errs() << "Virtual register " << PrintReg(*I)
1583 << " is used after the block.\n";
1584 }
1585 }
1586
1587 if (!MF->empty()) {
1588 BBInfo &MInfo = MBBInfoMap[&MF->front()];
1589 for (RegSet::iterator
1590 I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end(); I != E;
1591 ++I) {
1592 report("Virtual register defs don't dominate all uses.", MF);
1593 report_context_vreg(*I);
1594 }
1595 }
1596
1597 if (LiveVars)
1598 verifyLiveVariables();
1599 if (LiveInts)
1600 verifyLiveIntervals();
1601}
1602
1603void MachineVerifier::verifyLiveVariables() {
1604 assert(LiveVars && "Don't call verifyLiveVariables without LiveVars")((LiveVars && "Don't call verifyLiveVariables without LiveVars"
) ? static_cast<void> (0) : __assert_fail ("LiveVars && \"Don't call verifyLiveVariables without LiveVars\""
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 1604, __PRETTY_FUNCTION__))
;
1605 for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
1606 unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
1607 LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
1608 for (const auto &MBB : *MF) {
1609 BBInfo &MInfo = MBBInfoMap[&MBB];
1610
1611 // Our vregsRequired should be identical to LiveVariables' AliveBlocks
1612 if (MInfo.vregsRequired.count(Reg)) {
1613 if (!VI.AliveBlocks.test(MBB.getNumber())) {
1614 report("LiveVariables: Block missing from AliveBlocks", &MBB);
1615 errs() << "Virtual register " << PrintReg(Reg)
1616 << " must be live through the block.\n";
1617 }
1618 } else {
1619 if (VI.AliveBlocks.test(MBB.getNumber())) {
1620 report("LiveVariables: Block should not be in AliveBlocks", &MBB);
1621 errs() << "Virtual register " << PrintReg(Reg)
1622 << " is not needed live through the block.\n";
1623 }
1624 }
1625 }
1626 }
1627}
1628
1629void MachineVerifier::verifyLiveIntervals() {
1630 assert(LiveInts && "Don't call verifyLiveIntervals without LiveInts")((LiveInts && "Don't call verifyLiveIntervals without LiveInts"
) ? static_cast<void> (0) : __assert_fail ("LiveInts && \"Don't call verifyLiveIntervals without LiveInts\""
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 1630, __PRETTY_FUNCTION__))
;
1631 for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
1632 unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
1633
1634 // Spilling and splitting may leave unused registers around. Skip them.
1635 if (MRI->reg_nodbg_empty(Reg))
1636 continue;
1637
1638 if (!LiveInts->hasInterval(Reg)) {
1639 report("Missing live interval for virtual register", MF);
1640 errs() << PrintReg(Reg, TRI) << " still has defs or uses\n";
1641 continue;
1642 }
1643
1644 const LiveInterval &LI = LiveInts->getInterval(Reg);
1645 assert(Reg == LI.reg && "Invalid reg to interval mapping")((Reg == LI.reg && "Invalid reg to interval mapping")
? static_cast<void> (0) : __assert_fail ("Reg == LI.reg && \"Invalid reg to interval mapping\""
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 1645, __PRETTY_FUNCTION__))
;
1646 verifyLiveInterval(LI);
1647 }
1648
1649 // Verify all the cached regunit intervals.
1650 for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
1651 if (const LiveRange *LR = LiveInts->getCachedRegUnit(i))
1652 verifyLiveRange(*LR, i);
1653}
1654
// Verify value number VNI of live range LR. Reg is the virtual register or
// register unit the range belongs to (0 when not applicable); a non-empty
// LaneMask marks LR as a subregister live range for those lanes. Checks that
// the value is live at its own def slot and that the def point is either a
// PHI at a block start or a real instruction that writes the register.
void MachineVerifier::verifyLiveRangeValue(const LiveRange &LR,
    const VNInfo *VNI, unsigned Reg,
    LaneBitmask LaneMask) {
  // Unused values may legitimately dangle; nothing to check.
  if (VNI->isUnused())
    return;

  const VNInfo *DefVNI = LR.getVNInfoAt(VNI->def);

  if (!DefVNI) {
    report("Value not live at VNInfo def and not marked unused", MF);
    report_context(LR, Reg, LaneMask);
    report_context(*VNI);
    return;
  }

  // The value live at the def slot must be this very value.
  if (DefVNI != VNI) {
    report("Live segment at def has different VNInfo", MF);
    report_context(LR, Reg, LaneMask);
    report_context(*VNI);
    return;
  }

  const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(VNI->def);
  if (!MBB) {
    report("Invalid VNInfo definition index", MF);
    report_context(LR, Reg, LaneMask);
    report_context(*VNI);
    return;
  }

  if (VNI->isPHIDef()) {
    // PHI values must be defined at the very start of their block.
    if (VNI->def != LiveInts->getMBBStartIdx(MBB)) {
      report("PHIDef VNInfo is not defined at MBB start", MBB);
      report_context(LR, Reg, LaneMask);
      report_context(*VNI);
    }
    return;
  }

  // Non-PHI def.
  const MachineInstr *MI = LiveInts->getInstructionFromIndex(VNI->def);
  if (!MI) {
    report("No instruction at VNInfo def index", MBB);
    report_context(LR, Reg, LaneMask);
    report_context(*VNI);
    return;
  }

  if (Reg != 0) {
    // The defining instruction must contain an operand that writes Reg (or
    // one of its register units), overlapping LaneMask if one was given.
    bool hasDef = false;
    bool isEarlyClobber = false;
    for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
      if (!MOI->isReg() || !MOI->isDef())
        continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        if (MOI->getReg() != Reg)
          continue;
      } else {
        if (!TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) ||
            !TRI->hasRegUnit(MOI->getReg(), Reg))
          continue;
      }
      if (LaneMask.any() &&
          (TRI->getSubRegIndexLaneMask(MOI->getSubReg()) & LaneMask).none())
        continue;
      hasDef = true;
      if (MOI->isEarlyClobber())
        isEarlyClobber = true;
    }

    if (!hasDef) {
      report("Defining instruction does not modify register", MI);
      report_context(LR, Reg, LaneMask);
      report_context(*VNI);
    }

    // Early clobber defs begin at USE slots, but other defs must begin at
    // DEF slots.
    if (isEarlyClobber) {
      if (!VNI->def.isEarlyClobber()) {
        report("Early clobber def must be at an early-clobber slot", MBB);
        report_context(LR, Reg, LaneMask);
        report_context(*VNI);
      }
    } else if (!VNI->def.isRegister()) {
      report("Non-PHI, non-early clobber def must be at a register slot", MBB);
      report_context(LR, Reg, LaneMask);
      report_context(*VNI);
    }
  }
}
1746
1747void MachineVerifier::verifyLiveRangeSegment(const LiveRange &LR,
1748 const LiveRange::const_iterator I,
1749 unsigned Reg, LaneBitmask LaneMask)
1750{
1751 const LiveRange::Segment &S = *I;
1752 const VNInfo *VNI = S.valno;
1753 assert(VNI && "Live segment has no valno")((VNI && "Live segment has no valno") ? static_cast<
void> (0) : __assert_fail ("VNI && \"Live segment has no valno\""
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 1753, __PRETTY_FUNCTION__))
;
1754
1755 if (VNI->id >= LR.getNumValNums() || VNI != LR.getValNumInfo(VNI->id)) {
1
Assuming the condition is false
2
Taking false branch
1756 report("Foreign valno in live segment", MF);
1757 report_context(LR, Reg, LaneMask);
1758 report_context(S);
1759 report_context(*VNI);
1760 }
1761
1762 if (VNI->isUnused()) {
3
Taking false branch
1763 report("Live segment valno is marked unused", MF);
1764 report_context(LR, Reg, LaneMask);
1765 report_context(S);
1766 }
1767
1768 const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(S.start);
1769 if (!MBB) {
4
Taking false branch
1770 report("Bad start of live segment, no basic block", MF);
1771 report_context(LR, Reg, LaneMask);
1772 report_context(S);
1773 return;
1774 }
1775 SlotIndex MBBStartIdx = LiveInts->getMBBStartIdx(MBB);
1776 if (S.start != MBBStartIdx && S.start != VNI->def) {
1777 report("Live segment must begin at MBB entry or valno def", MBB);
1778 report_context(LR, Reg, LaneMask);
1779 report_context(S);
1780 }
1781
1782 const MachineBasicBlock *EndMBB =
1783 LiveInts->getMBBFromIndex(S.end.getPrevSlot());
1784 if (!EndMBB) {
5
Taking false branch
1785 report("Bad end of live segment, no basic block", MF);
1786 report_context(LR, Reg, LaneMask);
1787 report_context(S);
1788 return;
1789 }
1790
1791 // No more checks for live-out segments.
1792 if (S.end == LiveInts->getMBBEndIdx(EndMBB))
6
Taking false branch
1793 return;
1794
1795 // RegUnit intervals are allowed dead phis.
1796 if (!TargetRegisterInfo::isVirtualRegister(Reg) && VNI->isPHIDef() &&
7
Taking false branch
1797 S.start == VNI->def && S.end == VNI->def.getDeadSlot())
1798 return;
1799
1800 // The live segment is ending inside EndMBB
1801 const MachineInstr *MI =
1802 LiveInts->getInstructionFromIndex(S.end.getPrevSlot());
1803 if (!MI) {
8
Assuming 'MI' is non-null
9
Taking false branch
1804 report("Live segment doesn't end at a valid instruction", EndMBB);
1805 report_context(LR, Reg, LaneMask);
1806 report_context(S);
1807 return;
1808 }
1809
1810 // The block slot must refer to a basic block boundary.
1811 if (S.end.isBlock()) {
10
Taking false branch
1812 report("Live segment ends at B slot of an instruction", EndMBB);
1813 report_context(LR, Reg, LaneMask);
1814 report_context(S);
1815 }
1816
1817 if (S.end.isDead()) {
11
Taking false branch
1818 // Segment ends on the dead slot.
1819 // That means there must be a dead def.
1820 if (!SlotIndex::isSameInstr(S.start, S.end)) {
1821 report("Live segment ending at dead slot spans instructions", EndMBB);
1822 report_context(LR, Reg, LaneMask);
1823 report_context(S);
1824 }
1825 }
1826
1827 // A live segment can only end at an early-clobber slot if it is being
1828 // redefined by an early-clobber def.
1829 if (S.end.isEarlyClobber()) {
12
Taking false branch
1830 if (I+1 == LR.end() || (I+1)->start != S.end) {
1831 report("Live segment ending at early clobber slot must be "
1832 "redefined by an EC def in the same instruction", EndMBB);
1833 report_context(LR, Reg, LaneMask);
1834 report_context(S);
1835 }
1836 }
1837
1838 // The following checks only apply to virtual registers. Physreg liveness
1839 // is too weird to check.
1840 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
13
Taking false branch
1841 // A live segment can end with either a redefinition, a kill flag on a
1842 // use, or a dead flag on a def.
1843 bool hasRead = false;
1844 bool hasSubRegDef = false;
1845 bool hasDeadDef = false;
1846 for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
1847 if (!MOI->isReg() || MOI->getReg() != Reg)
1848 continue;
1849 unsigned Sub = MOI->getSubReg();
1850 LaneBitmask SLM = Sub != 0 ? TRI->getSubRegIndexLaneMask(Sub)
1851 : LaneBitmask::getAll();
1852 if (MOI->isDef()) {
1853 if (Sub != 0) {
1854 hasSubRegDef = true;
1855 // An operand vreg0:sub0<def> reads vreg0:sub1..n. Invert the lane
1856 // mask for subregister defs. Read-undef defs will be handled by
1857 // readsReg below.
1858 SLM = ~SLM;
1859 }
1860 if (MOI->isDead())
1861 hasDeadDef = true;
1862 }
1863 if (LaneMask.any() && (LaneMask & SLM).none())
1864 continue;
1865 if (MOI->readsReg())
1866 hasRead = true;
1867 }
1868 if (S.end.isDead()) {
1869 // Make sure that the corresponding machine operand for a "dead" live
1870 // range has the dead flag. We cannot perform this check for subregister
1871 // liveranges as partially dead values are allowed.
1872 if (LaneMask.none() && !hasDeadDef) {
1873 report("Instruction ending live segment on dead slot has no dead flag",
1874 MI);
1875 report_context(LR, Reg, LaneMask);
1876 report_context(S);
1877 }
1878 } else {
1879 if (!hasRead) {
1880 // When tracking subregister liveness, the main range must start new
1881 // values on partial register writes, even if there is no read.
1882 if (!MRI->shouldTrackSubRegLiveness(Reg) || LaneMask.any() ||
1883 !hasSubRegDef) {
1884 report("Instruction ending live segment doesn't read the register",
1885 MI);
1886 report_context(LR, Reg, LaneMask);
1887 report_context(S);
1888 }
1889 }
1890 }
1891 }
1892
1893 // Now check all the basic blocks in this live segment.
1894 MachineFunction::const_iterator MFI = MBB->getIterator();
1895 // Is this live segment the beginning of a non-PHIDef VN?
1896 if (S.start == VNI->def && !VNI->isPHIDef()) {
1897 // Not live-in to any blocks.
1898 if (MBB == EndMBB)
1899 return;
1900 // Skip this block.
1901 ++MFI;
1902 }
1903 for (;;) {
14
Loop condition is true. Entering loop body
20
Loop condition is true. Entering loop body
26
Loop condition is true. Entering loop body
32
Loop condition is true. Entering loop body
1904 assert(LiveInts->isLiveInToMBB(LR, &*MFI))((LiveInts->isLiveInToMBB(LR, &*MFI)) ? static_cast<
void> (0) : __assert_fail ("LiveInts->isLiveInToMBB(LR, &*MFI)"
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 1904, __PRETTY_FUNCTION__))
;
1905 // We don't know how to track physregs into a landing pad.
1906 if (!TargetRegisterInfo::isVirtualRegister(Reg) &&
16
Taking false branch
22
Taking false branch
28
Taking false branch
34
Taking false branch
1907 MFI->isEHPad()) {
15
Assuming the condition is false
21
Assuming the condition is false
27
Assuming the condition is false
33
Assuming the condition is false
1908 if (&*MFI == EndMBB)
1909 break;
1910 ++MFI;
1911 continue;
1912 }
1913
1914 // Is VNI a PHI-def in the current block?
1915 bool IsPHI = VNI->isPHIDef() &&
1916 VNI->def == LiveInts->getMBBStartIdx(&*MFI);
1917
1918 // Check that VNI is live-out of all predecessors.
1919 for (MachineBasicBlock::const_pred_iterator PI = MFI->pred_begin(),
17
Loop condition is false. Execution continues on line 1946
23
Loop condition is false. Execution continues on line 1946
29
Loop condition is false. Execution continues on line 1946
35
Loop condition is true. Entering loop body
1920 PE = MFI->pred_end(); PI != PE; ++PI) {
1921 SlotIndex PEnd = LiveInts->getMBBEndIdx(*PI);
1922 const VNInfo *PVNI = LR.getVNInfoBefore(PEnd);
36
'PVNI' initialized here
1923
1924 // All predecessors must have a live-out value if this is not a
1925 // subregister liverange.
1926 if (!PVNI && LaneMask.none()) {
37
Assuming 'PVNI' is null
38
Taking false branch
1927 report("Register not marked live out of predecessor", *PI);
1928 report_context(LR, Reg, LaneMask);
1929 report_context(*VNI);
1930 errs() << " live into BB#" << MFI->getNumber()
1931 << '@' << LiveInts->getMBBStartIdx(&*MFI) << ", not live before "
1932 << PEnd << '\n';
1933 continue;
1934 }
1935
1936 // Only PHI-defs can take different predecessor values.
1937 if (!IsPHI && PVNI != VNI) {
39
Taking true branch
1938 report("Different value live out of predecessor", *PI);
1939 report_context(LR, Reg, LaneMask);
1940 errs() << "Valno #" << PVNI->id << " live out of BB#"
40
Access to field 'id' results in a dereference of a null pointer (loaded from variable 'PVNI')
1941 << (*PI)->getNumber() << '@' << PEnd << "\nValno #" << VNI->id
1942 << " live into BB#" << MFI->getNumber() << '@'
1943 << LiveInts->getMBBStartIdx(&*MFI) << '\n';
1944 }
1945 }
1946 if (&*MFI == EndMBB)
18
Assuming the condition is false
19
Taking false branch
24
Assuming the condition is false
25
Taking false branch
30
Assuming the condition is false
31
Taking false branch
1947 break;
1948 ++MFI;
1949 }
1950}
1951
1952void MachineVerifier::verifyLiveRange(const LiveRange &LR, unsigned Reg,
1953 LaneBitmask LaneMask) {
1954 for (const VNInfo *VNI : LR.valnos)
1955 verifyLiveRangeValue(LR, VNI, Reg, LaneMask);
1956
1957 for (LiveRange::const_iterator I = LR.begin(), E = LR.end(); I != E; ++I)
1958 verifyLiveRangeSegment(LR, I, Reg, LaneMask);
1959}
1960
1961void MachineVerifier::verifyLiveInterval(const LiveInterval &LI) {
1962 unsigned Reg = LI.reg;
1963 assert(TargetRegisterInfo::isVirtualRegister(Reg))((TargetRegisterInfo::isVirtualRegister(Reg)) ? static_cast<
void> (0) : __assert_fail ("TargetRegisterInfo::isVirtualRegister(Reg)"
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 1963, __PRETTY_FUNCTION__))
;
1964 verifyLiveRange(LI, Reg);
1965
1966 LaneBitmask Mask;
1967 LaneBitmask MaxMask = MRI->getMaxLaneMaskForVReg(Reg);
1968 for (const LiveInterval::SubRange &SR : LI.subranges()) {
1969 if ((Mask & SR.LaneMask).any()) {
1970 report("Lane masks of sub ranges overlap in live interval", MF);
1971 report_context(LI);
1972 }
1973 if ((SR.LaneMask & ~MaxMask).any()) {
1974 report("Subrange lanemask is invalid", MF);
1975 report_context(LI);
1976 }
1977 if (SR.empty()) {
1978 report("Subrange must not be empty", MF);
1979 report_context(SR, LI.reg, SR.LaneMask);
1980 }
1981 Mask |= SR.LaneMask;
1982 verifyLiveRange(SR, LI.reg, SR.LaneMask);
1983 if (!LI.covers(SR)) {
1984 report("A Subrange is not covered by the main range", MF);
1985 report_context(LI);
1986 }
1987 }
1988
1989 // Check the LI only has one connected component.
1990 ConnectedVNInfoEqClasses ConEQ(*LiveInts);
1991 unsigned NumComp = ConEQ.Classify(LI);
1992 if (NumComp > 1) {
1993 report("Multiple connected components in live interval", MF);
1994 report_context(LI);
1995 for (unsigned comp = 0; comp != NumComp; ++comp) {
1996 errs() << comp << ": valnos";
1997 for (LiveInterval::const_vni_iterator I = LI.vni_begin(),
1998 E = LI.vni_end(); I!=E; ++I)
1999 if (comp == ConEQ.getEqClass(*I))
2000 errs() << ' ' << (*I)->id;
2001 errs() << '\n';
2002 }
2003 }
2004}
2005
namespace {
  // FrameSetup and FrameDestroy can both carry a zero adjustment, so a lone
  // integer cannot distinguish "inside a frame" from "no frame". Pair each
  // value with a bool recording whether a frame setup is in effect.
  struct StackStateOfBB {
    StackStateOfBB() = default;
    StackStateOfBB(int EntryVal, int ExitVal, bool EntrySetup, bool ExitSetup) :
      EntryValue(EntryVal), ExitValue(ExitVal), EntryIsSetup(EntrySetup),
      ExitIsSetup(ExitSetup) { }
    // Stack adjustment on entry/exit; can be negative while a frame is being
    // set up.
    int EntryValue = 0;
    int ExitValue = 0;
    bool EntryIsSetup = false;
    bool ExitIsSetup = false;
  };
}
2024
2025/// Make sure on every path through the CFG, a FrameSetup <n> is always followed
2026/// by a FrameDestroy <n>, stack adjustments are identical on all
2027/// CFG edges to a merge point, and frame is destroyed at end of a return block.
2028void MachineVerifier::verifyStackFrame() {
2029 unsigned FrameSetupOpcode = TII->getCallFrameSetupOpcode();
2030 unsigned FrameDestroyOpcode = TII->getCallFrameDestroyOpcode();
2031
2032 SmallVector<StackStateOfBB, 8> SPState;
2033 SPState.resize(MF->getNumBlockIDs());
2034 df_iterator_default_set<const MachineBasicBlock*> Reachable;
2035
2036 // Visit the MBBs in DFS order.
2037 for (df_ext_iterator<const MachineFunction*,
2038 df_iterator_default_set<const MachineBasicBlock*> >
2039 DFI = df_ext_begin(MF, Reachable), DFE = df_ext_end(MF, Reachable);
2040 DFI != DFE; ++DFI) {
2041 const MachineBasicBlock *MBB = *DFI;
2042
2043 StackStateOfBB BBState;
2044 // Check the exit state of the DFS stack predecessor.
2045 if (DFI.getPathLength() >= 2) {
2046 const MachineBasicBlock *StackPred = DFI.getPath(DFI.getPathLength() - 2);
2047 assert(Reachable.count(StackPred) &&((Reachable.count(StackPred) && "DFS stack predecessor is already visited.\n"
) ? static_cast<void> (0) : __assert_fail ("Reachable.count(StackPred) && \"DFS stack predecessor is already visited.\\n\""
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 2048, __PRETTY_FUNCTION__))
2048 "DFS stack predecessor is already visited.\n")((Reachable.count(StackPred) && "DFS stack predecessor is already visited.\n"
) ? static_cast<void> (0) : __assert_fail ("Reachable.count(StackPred) && \"DFS stack predecessor is already visited.\\n\""
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 2048, __PRETTY_FUNCTION__))
;
2049 BBState.EntryValue = SPState[StackPred->getNumber()].ExitValue;
2050 BBState.EntryIsSetup = SPState[StackPred->getNumber()].ExitIsSetup;
2051 BBState.ExitValue = BBState.EntryValue;
2052 BBState.ExitIsSetup = BBState.EntryIsSetup;
2053 }
2054
2055 // Update stack state by checking contents of MBB.
2056 for (const auto &I : *MBB) {
2057 if (I.getOpcode() == FrameSetupOpcode) {
2058 // The first operand of a FrameOpcode should be i32.
2059 int Size = I.getOperand(0).getImm();
2060 assert(Size >= 0 &&((Size >= 0 && "Value should be non-negative in FrameSetup and FrameDestroy.\n"
) ? static_cast<void> (0) : __assert_fail ("Size >= 0 && \"Value should be non-negative in FrameSetup and FrameDestroy.\\n\""
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 2061, __PRETTY_FUNCTION__))
2061 "Value should be non-negative in FrameSetup and FrameDestroy.\n")((Size >= 0 && "Value should be non-negative in FrameSetup and FrameDestroy.\n"
) ? static_cast<void> (0) : __assert_fail ("Size >= 0 && \"Value should be non-negative in FrameSetup and FrameDestroy.\\n\""
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 2061, __PRETTY_FUNCTION__))
;
2062
2063 if (BBState.ExitIsSetup)
2064 report("FrameSetup is after another FrameSetup", &I);
2065 BBState.ExitValue -= Size;
2066 BBState.ExitIsSetup = true;
2067 }
2068
2069 if (I.getOpcode() == FrameDestroyOpcode) {
2070 // The first operand of a FrameOpcode should be i32.
2071 int Size = I.getOperand(0).getImm();
2072 assert(Size >= 0 &&((Size >= 0 && "Value should be non-negative in FrameSetup and FrameDestroy.\n"
) ? static_cast<void> (0) : __assert_fail ("Size >= 0 && \"Value should be non-negative in FrameSetup and FrameDestroy.\\n\""
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 2073, __PRETTY_FUNCTION__))
2073 "Value should be non-negative in FrameSetup and FrameDestroy.\n")((Size >= 0 && "Value should be non-negative in FrameSetup and FrameDestroy.\n"
) ? static_cast<void> (0) : __assert_fail ("Size >= 0 && \"Value should be non-negative in FrameSetup and FrameDestroy.\\n\""
, "/tmp/buildd/llvm-toolchain-snapshot-5.0~svn298304/lib/CodeGen/MachineVerifier.cpp"
, 2073, __PRETTY_FUNCTION__))
;
2074
2075 if (!BBState.ExitIsSetup)
2076 report("FrameDestroy is not after a FrameSetup", &I);
2077 int AbsSPAdj = BBState.ExitValue < 0 ? -BBState.ExitValue :
2078 BBState.ExitValue;
2079 if (BBState.ExitIsSetup && AbsSPAdj != Size) {
2080 report("FrameDestroy <n> is after FrameSetup <m>", &I);
2081 errs() << "FrameDestroy <" << Size << "> is after FrameSetup <"
2082 << AbsSPAdj << ">.\n";
2083 }
2084 BBState.ExitValue += Size;
2085 BBState.ExitIsSetup = false;
2086 }
2087 }
2088 SPState[MBB->getNumber()] = BBState;
2089
2090 // Make sure the exit state of any predecessor is consistent with the entry
2091 // state.
2092 for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
2093 E = MBB->pred_end(); I != E; ++I) {
2094 if (Reachable.count(*I) &&
2095 (SPState[(*I)->getNumber()].ExitValue != BBState.EntryValue ||
2096 SPState[(*I)->getNumber()].ExitIsSetup != BBState.EntryIsSetup)) {
2097 report("The exit stack state of a predecessor is inconsistent.", MBB);
2098 errs() << "Predecessor BB#" << (*I)->getNumber() << " has exit state ("
2099 << SPState[(*I)->getNumber()].ExitValue << ", "
2100 << SPState[(*I)->getNumber()].ExitIsSetup
2101 << "), while BB#" << MBB->getNumber() << " has entry state ("
2102 << BBState.EntryValue << ", " << BBState.EntryIsSetup << ").\n";
2103 }
2104 }
2105
2106 // Make sure the entry state of any successor is consistent with the exit
2107 // state.
2108 for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
2109 E = MBB->succ_end(); I != E; ++I) {
2110 if (Reachable.count(*I) &&
2111 (SPState[(*I)->getNumber()].EntryValue != BBState.ExitValue ||
2112 SPState[(*I)->getNumber()].EntryIsSetup != BBState.ExitIsSetup)) {
2113 report("The entry stack state of a successor is inconsistent.", MBB);
2114 errs() << "Successor BB#" << (*I)->getNumber() << " has entry state ("
2115 << SPState[(*I)->getNumber()].EntryValue << ", "
2116 << SPState[(*I)->getNumber()].EntryIsSetup
2117 << "), while BB#" << MBB->getNumber() << " has exit state ("
2118 << BBState.ExitValue << ", " << BBState.ExitIsSetup << ").\n";
2119 }
2120 }
2121
2122 // Make sure a basic block with return ends with zero stack adjustment.
2123 if (!MBB->empty() && MBB->back().isReturn()) {
2124 if (BBState.ExitIsSetup)
2125 report("A return block ends with a FrameSetup.", MBB);
2126 if (BBState.ExitValue)
2127 report("A return block ends with a nonzero stack adjustment.", MBB);
2128 }
2129 }
2130}