Bug Summary

File: lib/CodeGen/MachineVerifier.cpp
Warning: line 1942, column 32
Access to field 'id' results in a dereference of a null pointer (loaded from variable 'PVNI')
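
The warning reports a path on which the value-number pointer PVNI can still be null when its 'id' field is read. The flagged code (around source line 1942, in the live-range checks further down this file) is not part of this excerpt, so the sketch below only illustrates the usual guard pattern; the surrounding names (Pred, PEnd, LR) are hypothetical stand-ins. LiveRange::getVNInfoBefore may legitimately return null, so the pointer has to be tested on every path before ->id is touched:

    // Illustrative sketch, not the exact code at line 1942.
    // Assume this runs inside a loop over predecessor blocks (Pred).
    const VNInfo *PVNI = LR.getVNInfoBefore(PEnd);   // may be nullptr
    if (!PVNI) {
      report("Register not marked live out of predecessor", Pred);
      continue;                        // skip everything that would dereference PVNI
    }
    if (PVNI != VNI) {
      report("Different value live out of predecessor", Pred);
      errs() << "Valno #" << PVNI->id << " live out, but valno #" << VNI->id
             << " expected.\n";        // safe: PVNI was checked above
    }
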

Annotated Source Code

1//===-- MachineVerifier.cpp - Machine Code Verifier -----------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// Pass to verify generated machine code. The following is checked:
11//
12// Operand counts: All explicit operands must be present.
13//
14// Register classes: All physical and virtual register operands must be
15// compatible with the register class required by the instruction descriptor.
16//
17// Register live intervals: Registers must be defined only once, and must be
18// defined before use.
19//
20// The machine code verifier is enabled from LLVMTargetMachine.cpp with the
21// command-line option -verify-machineinstrs, or by defining the environment
22// variable LLVM_VERIFY_MACHINEINSTRS to the name of a file that will receive
23// the verifier errors.
24//===----------------------------------------------------------------------===//
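
Besides the -verify-machineinstrs flag mentioned in the header comment above, the verifier can be invoked directly through MachineFunction::verify (defined near source line 291 below). A minimal sketch, assuming a hypothetical MachineFunctionPass named MyPass:

    // Sketch only: verify the function right after a transformation.
    bool MyPass::runOnMachineFunction(MachineFunction &MF) {
      bool Changed = rewriteSomething(MF);   // hypothetical transformation
      // Passing 'this' lets the verifier pick up available analyses
      // (LiveIntervals, SlotIndexes, ...); AbortOnErrors=false makes it
      // return false instead of calling report_fatal_error.
      if (!MF.verify(this, "After MyPass", /*AbortOnErrors=*/false))
        errs() << "MyPass produced invalid machine code\n";
      return Changed;
    }
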
25
26#include "llvm/CodeGen/Passes.h"
27#include "llvm/ADT/DenseSet.h"
28#include "llvm/ADT/DepthFirstIterator.h"
29#include "llvm/ADT/SetOperations.h"
30#include "llvm/ADT/SmallVector.h"
31#include "llvm/Analysis/EHPersonalities.h"
32#include "llvm/CodeGen/LiveIntervalAnalysis.h"
33#include "llvm/CodeGen/LiveStackAnalysis.h"
34#include "llvm/CodeGen/LiveVariables.h"
35#include "llvm/CodeGen/MachineFrameInfo.h"
36#include "llvm/CodeGen/MachineFunctionPass.h"
37#include "llvm/CodeGen/MachineMemOperand.h"
38#include "llvm/CodeGen/MachineRegisterInfo.h"
39#include "llvm/IR/BasicBlock.h"
40#include "llvm/IR/InlineAsm.h"
41#include "llvm/IR/Instructions.h"
42#include "llvm/MC/MCAsmInfo.h"
43#include "llvm/Support/Debug.h"
44#include "llvm/Support/ErrorHandling.h"
45#include "llvm/Support/FileSystem.h"
46#include "llvm/Support/raw_ostream.h"
47#include "llvm/Target/TargetInstrInfo.h"
48#include "llvm/Target/TargetMachine.h"
49#include "llvm/Target/TargetRegisterInfo.h"
50#include "llvm/Target/TargetSubtargetInfo.h"
51using namespace llvm;
52
53namespace {
54 struct MachineVerifier {
55
56 MachineVerifier(Pass *pass, const char *b) :
57 PASS(pass),
58 Banner(b)
59 {}
60
61 unsigned verify(MachineFunction &MF);
62
63 Pass *const PASS;
64 const char *Banner;
65 const MachineFunction *MF;
66 const TargetMachine *TM;
67 const TargetInstrInfo *TII;
68 const TargetRegisterInfo *TRI;
69 const MachineRegisterInfo *MRI;
70
71 unsigned foundErrors;
72
73 // Avoid querying the MachineFunctionProperties for each operand.
74 bool isFunctionRegBankSelected;
75 bool isFunctionSelected;
76
77 typedef SmallVector<unsigned, 16> RegVector;
78 typedef SmallVector<const uint32_t*, 4> RegMaskVector;
79 typedef DenseSet<unsigned> RegSet;
80 typedef DenseMap<unsigned, const MachineInstr*> RegMap;
81 typedef SmallPtrSet<const MachineBasicBlock*, 8> BlockSet;
82
83 const MachineInstr *FirstTerminator;
84 BlockSet FunctionBlocks;
85
86 BitVector regsReserved;
87 RegSet regsLive;
88 RegVector regsDefined, regsDead, regsKilled;
89 RegMaskVector regMasks;
90 RegSet regsLiveInButUnused;
91
92 SlotIndex lastIndex;
93
94 // Add Reg and any sub-registers to RV
95 void addRegWithSubRegs(RegVector &RV, unsigned Reg) {
96 RV.push_back(Reg);
97 if (TargetRegisterInfo::isPhysicalRegister(Reg))
98 for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
99 RV.push_back(*SubRegs);
100 }
101
102 struct BBInfo {
103 // Is this MBB reachable from the MF entry point?
104 bool reachable;
105
106 // Vregs that must be live in because they are used without being
107 // defined. Map value is the user.
108 RegMap vregsLiveIn;
109
110 // Regs killed in MBB. They may be defined again, and will then be in both
111 // regsKilled and regsLiveOut.
112 RegSet regsKilled;
113
114 // Regs defined in MBB and live out. Note that vregs passing through may
115 // be live out without being mentioned here.
116 RegSet regsLiveOut;
117
118 // Vregs that pass through MBB untouched. This set is disjoint from
119 // regsKilled and regsLiveOut.
120 RegSet vregsPassed;
121
122 // Vregs that must pass through MBB because they are needed by a successor
123 // block. This set is disjoint from regsLiveOut.
124 RegSet vregsRequired;
125
126 // Set versions of block's predecessor and successor lists.
127 BlockSet Preds, Succs;
128
129 BBInfo() : reachable(false) {}
130
131 // Add register to vregsPassed if it belongs there. Return true if
132 // anything changed.
133 bool addPassed(unsigned Reg) {
134 if (!TargetRegisterInfo::isVirtualRegister(Reg))
135 return false;
136 if (regsKilled.count(Reg) || regsLiveOut.count(Reg))
137 return false;
138 return vregsPassed.insert(Reg).second;
139 }
140
141 // Same for a full set.
142 bool addPassed(const RegSet &RS) {
143 bool changed = false;
144 for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
145 if (addPassed(*I))
146 changed = true;
147 return changed;
148 }
149
150 // Add register to vregsRequired if it belongs there. Return true if
151 // anything changed.
152 bool addRequired(unsigned Reg) {
153 if (!TargetRegisterInfo::isVirtualRegister(Reg))
154 return false;
155 if (regsLiveOut.count(Reg))
156 return false;
157 return vregsRequired.insert(Reg).second;
158 }
159
160 // Same for a full set.
161 bool addRequired(const RegSet &RS) {
162 bool changed = false;
163 for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
164 if (addRequired(*I))
165 changed = true;
166 return changed;
167 }
168
169 // Same for a full map.
170 bool addRequired(const RegMap &RM) {
171 bool changed = false;
172 for (RegMap::const_iterator I = RM.begin(), E = RM.end(); I != E; ++I)
173 if (addRequired(I->first))
174 changed = true;
175 return changed;
176 }
177
178 // Live-out registers are either in regsLiveOut or vregsPassed.
179 bool isLiveOut(unsigned Reg) const {
180 return regsLiveOut.count(Reg) || vregsPassed.count(Reg);
181 }
182 };
183
184 // Extra register info per MBB.
185 DenseMap<const MachineBasicBlock*, BBInfo> MBBInfoMap;
186
187 bool isReserved(unsigned Reg) {
188 return Reg < regsReserved.size() && regsReserved.test(Reg);
189 }
190
191 bool isAllocatable(unsigned Reg) {
192 return Reg < TRI->getNumRegs() && MRI->isAllocatable(Reg);
193 }
194
195 // Analysis information if available
196 LiveVariables *LiveVars;
197 LiveIntervals *LiveInts;
198 LiveStacks *LiveStks;
199 SlotIndexes *Indexes;
200
201 void visitMachineFunctionBefore();
202 void visitMachineBasicBlockBefore(const MachineBasicBlock *MBB);
203 void visitMachineBundleBefore(const MachineInstr *MI);
204 void visitMachineInstrBefore(const MachineInstr *MI);
205 void visitMachineOperand(const MachineOperand *MO, unsigned MONum);
206 void visitMachineInstrAfter(const MachineInstr *MI);
207 void visitMachineBundleAfter(const MachineInstr *MI);
208 void visitMachineBasicBlockAfter(const MachineBasicBlock *MBB);
209 void visitMachineFunctionAfter();
210
211 void report(const char *msg, const MachineFunction *MF);
212 void report(const char *msg, const MachineBasicBlock *MBB);
213 void report(const char *msg, const MachineInstr *MI);
214 void report(const char *msg, const MachineOperand *MO, unsigned MONum);
215
216 void report_context(const LiveInterval &LI) const;
217 void report_context(const LiveRange &LR, unsigned VRegUnit,
218 LaneBitmask LaneMask) const;
219 void report_context(const LiveRange::Segment &S) const;
220 void report_context(const VNInfo &VNI) const;
221 void report_context(SlotIndex Pos) const;
222 void report_context_liverange(const LiveRange &LR) const;
223 void report_context_lanemask(LaneBitmask LaneMask) const;
224 void report_context_vreg(unsigned VReg) const;
225 void report_context_vreg_regunit(unsigned VRegOrRegUnit) const;
226
227 void verifyInlineAsm(const MachineInstr *MI);
228
229 void checkLiveness(const MachineOperand *MO, unsigned MONum);
230 void checkLivenessAtUse(const MachineOperand *MO, unsigned MONum,
231 SlotIndex UseIdx, const LiveRange &LR, unsigned Reg,
232 LaneBitmask LaneMask = LaneBitmask::getNone());
233 void checkLivenessAtDef(const MachineOperand *MO, unsigned MONum,
234 SlotIndex DefIdx, const LiveRange &LR, unsigned Reg,
235 LaneBitmask LaneMask = LaneBitmask::getNone());
236
237 void markReachable(const MachineBasicBlock *MBB);
238 void calcRegsPassed();
239 void checkPHIOps(const MachineBasicBlock *MBB);
240
241 void calcRegsRequired();
242 void verifyLiveVariables();
243 void verifyLiveIntervals();
244 void verifyLiveInterval(const LiveInterval&);
245 void verifyLiveRangeValue(const LiveRange&, const VNInfo*, unsigned,
246 LaneBitmask);
247 void verifyLiveRangeSegment(const LiveRange&,
248 const LiveRange::const_iterator I, unsigned,
249 LaneBitmask);
250 void verifyLiveRange(const LiveRange&, unsigned,
251 LaneBitmask LaneMask = LaneBitmask::getNone());
252
253 void verifyStackFrame();
254
255 void verifySlotIndexes() const;
256 void verifyProperties(const MachineFunction &MF);
257 };
258
259 struct MachineVerifierPass : public MachineFunctionPass {
260 static char ID; // Pass ID, replacement for typeid
261 const std::string Banner;
262
263 MachineVerifierPass(std::string banner = std::string())
264 : MachineFunctionPass(ID), Banner(std::move(banner)) {
265 initializeMachineVerifierPassPass(*PassRegistry::getPassRegistry());
266 }
267
268 void getAnalysisUsage(AnalysisUsage &AU) const override {
269 AU.setPreservesAll();
270 MachineFunctionPass::getAnalysisUsage(AU);
271 }
272
273 bool runOnMachineFunction(MachineFunction &MF) override {
274 unsigned FoundErrors = MachineVerifier(this, Banner.c_str()).verify(MF);
275 if (FoundErrors)
276 report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
277 return false;
278 }
279 };
280
281}
282
283char MachineVerifierPass::ID = 0;
284INITIALIZE_PASS(MachineVerifierPass, "machineverifier",
285 "Verify generated machine code", false, false)
286
287FunctionPass *llvm::createMachineVerifierPass(const std::string &Banner) {
288 return new MachineVerifierPass(Banner);
289}
290
291bool MachineFunction::verify(Pass *p, const char *Banner, bool AbortOnErrors)
292 const {
293 MachineFunction &MF = const_cast<MachineFunction&>(*this);
294 unsigned FoundErrors = MachineVerifier(p, Banner).verify(MF);
295 if (AbortOnErrors && FoundErrors)
296 report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
297 return FoundErrors == 0;
298}
299
300void MachineVerifier::verifySlotIndexes() const {
301 if (Indexes == nullptr)
302 return;
303
304 // Ensure the IdxMBB list is sorted by slot indexes.
305 SlotIndex Last;
306 for (SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin(),
307 E = Indexes->MBBIndexEnd(); I != E; ++I) {
308 assert(!Last.isValid() || I->first > Last);
309 Last = I->first;
310 }
311}
312
313void MachineVerifier::verifyProperties(const MachineFunction &MF) {
314 // If a pass has introduced virtual registers without clearing the
315 // NoVRegs property (or set it without allocating the vregs)
316 // then report an error.
317 if (MF.getProperties().hasProperty(
318 MachineFunctionProperties::Property::NoVRegs) &&
319 MRI->getNumVirtRegs())
320 report("Function has NoVRegs property but there are VReg operands", &MF);
321}
322
323unsigned MachineVerifier::verify(MachineFunction &MF) {
324 foundErrors = 0;
325
326 this->MF = &MF;
327 TM = &MF.getTarget();
328 TII = MF.getSubtarget().getInstrInfo();
329 TRI = MF.getSubtarget().getRegisterInfo();
330 MRI = &MF.getRegInfo();
331
332 isFunctionRegBankSelected = MF.getProperties().hasProperty(
333 MachineFunctionProperties::Property::RegBankSelected);
334 isFunctionSelected = MF.getProperties().hasProperty(
335 MachineFunctionProperties::Property::Selected);
336
337 LiveVars = nullptr;
338 LiveInts = nullptr;
339 LiveStks = nullptr;
340 Indexes = nullptr;
341 if (PASS) {
342 LiveInts = PASS->getAnalysisIfAvailable<LiveIntervals>();
343 // We don't want to verify LiveVariables if LiveIntervals is available.
344 if (!LiveInts)
345 LiveVars = PASS->getAnalysisIfAvailable<LiveVariables>();
346 LiveStks = PASS->getAnalysisIfAvailable<LiveStacks>();
347 Indexes = PASS->getAnalysisIfAvailable<SlotIndexes>();
348 }
349
350 verifySlotIndexes();
351
352 verifyProperties(MF);
353
354 visitMachineFunctionBefore();
355 for (MachineFunction::const_iterator MFI = MF.begin(), MFE = MF.end();
356 MFI!=MFE; ++MFI) {
357 visitMachineBasicBlockBefore(&*MFI);
358 // Keep track of the current bundle header.
359 const MachineInstr *CurBundle = nullptr;
360 // Do we expect the next instruction to be part of the same bundle?
361 bool InBundle = false;
362
363 for (MachineBasicBlock::const_instr_iterator MBBI = MFI->instr_begin(),
364 MBBE = MFI->instr_end(); MBBI != MBBE; ++MBBI) {
365 if (MBBI->getParent() != &*MFI) {
366 report("Bad instruction parent pointer", &*MFI);
367 errs() << "Instruction: " << *MBBI;
368 continue;
369 }
370
371 // Check for consistent bundle flags.
372 if (InBundle && !MBBI->isBundledWithPred())
373 report("Missing BundledPred flag, "
374 "BundledSucc was set on predecessor",
375 &*MBBI);
376 if (!InBundle && MBBI->isBundledWithPred())
377 report("BundledPred flag is set, "
378 "but BundledSucc not set on predecessor",
379 &*MBBI);
380
381 // Is this a bundle header?
382 if (!MBBI->isInsideBundle()) {
383 if (CurBundle)
384 visitMachineBundleAfter(CurBundle);
385 CurBundle = &*MBBI;
386 visitMachineBundleBefore(CurBundle);
387 } else if (!CurBundle)
388 report("No bundle header", &*MBBI);
389 visitMachineInstrBefore(&*MBBI);
390 for (unsigned I = 0, E = MBBI->getNumOperands(); I != E; ++I) {
391 const MachineInstr &MI = *MBBI;
392 const MachineOperand &Op = MI.getOperand(I);
393 if (Op.getParent() != &MI) {
394 // Make sure to use correct addOperand / RemoveOperand / ChangeTo
395 // functions when replacing operands of a MachineInstr.
396 report("Instruction has operand with wrong parent set", &MI);
397 }
398
399 visitMachineOperand(&Op, I);
400 }
401
402 visitMachineInstrAfter(&*MBBI);
403
404 // Was this the last bundled instruction?
405 InBundle = MBBI->isBundledWithSucc();
406 }
407 if (CurBundle)
408 visitMachineBundleAfter(CurBundle);
409 if (InBundle)
410 report("BundledSucc flag set on last instruction in block", &MFI->back());
411 visitMachineBasicBlockAfter(&*MFI);
412 }
413 visitMachineFunctionAfter();
414
415 // Clean up.
416 regsLive.clear();
417 regsDefined.clear();
418 regsDead.clear();
419 regsKilled.clear();
420 regMasks.clear();
421 regsLiveInButUnused.clear();
422 MBBInfoMap.clear();
423
424 return foundErrors;
425}
426
427void MachineVerifier::report(const char *msg, const MachineFunction *MF) {
428 assert(MF);
429 errs() << '\n';
430 if (!foundErrors++) {
431 if (Banner)
432 errs() << "# " << Banner << '\n';
433 if (LiveInts != nullptr)
434 LiveInts->print(errs());
435 else
436 MF->print(errs(), Indexes);
437 }
438 errs() << "*** Bad machine code: " << msg << " ***\n"
439 << "- function: " << MF->getName() << "\n";
440}
441
442void MachineVerifier::report(const char *msg, const MachineBasicBlock *MBB) {
443 assert(MBB);
444 report(msg, MBB->getParent());
445 errs() << "- basic block: BB#" << MBB->getNumber()
446 << ' ' << MBB->getName()
447 << " (" << (const void*)MBB << ')';
448 if (Indexes)
449 errs() << " [" << Indexes->getMBBStartIdx(MBB)
450 << ';' << Indexes->getMBBEndIdx(MBB) << ')';
451 errs() << '\n';
452}
453
454void MachineVerifier::report(const char *msg, const MachineInstr *MI) {
455 assert(MI);
456 report(msg, MI->getParent());
457 errs() << "- instruction: ";
458 if (Indexes && Indexes->hasIndex(*MI))
459 errs() << Indexes->getInstructionIndex(*MI) << '\t';
460 MI->print(errs(), /*SkipOpers=*/true);
461 errs() << '\n';
462}
463
464void MachineVerifier::report(const char *msg,
465 const MachineOperand *MO, unsigned MONum) {
466 assert(MO);
467 report(msg, MO->getParent());
468 errs() << "- operand " << MONum << ": ";
469 MO->print(errs(), TRI);
470 errs() << "\n";
471}
472
473void MachineVerifier::report_context(SlotIndex Pos) const {
474 errs() << "- at: " << Pos << '\n';
475}
476
477void MachineVerifier::report_context(const LiveInterval &LI) const {
478 errs() << "- interval: " << LI << '\n';
479}
480
481void MachineVerifier::report_context(const LiveRange &LR, unsigned VRegUnit,
482 LaneBitmask LaneMask) const {
483 report_context_liverange(LR);
484 report_context_vreg_regunit(VRegUnit);
485 if (LaneMask.any())
486 report_context_lanemask(LaneMask);
487}
488
489void MachineVerifier::report_context(const LiveRange::Segment &S) const {
490 errs() << "- segment: " << S << '\n';
491}
492
493void MachineVerifier::report_context(const VNInfo &VNI) const {
494 errs() << "- ValNo: " << VNI.id << " (def " << VNI.def << ")\n";
495}
496
497void MachineVerifier::report_context_liverange(const LiveRange &LR) const {
498 errs() << "- liverange: " << LR << '\n';
499}
500
501void MachineVerifier::report_context_vreg(unsigned VReg) const {
502 errs() << "- v. register: " << PrintReg(VReg, TRI) << '\n';
503}
504
505void MachineVerifier::report_context_vreg_regunit(unsigned VRegOrUnit) const {
506 if (TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
507 report_context_vreg(VRegOrUnit);
508 } else {
509 errs() << "- regunit: " << PrintRegUnit(VRegOrUnit, TRI) << '\n';
510 }
511}
512
513void MachineVerifier::report_context_lanemask(LaneBitmask LaneMask) const {
514 errs() << "- lanemask: " << PrintLaneMask(LaneMask) << '\n';
515}
516
517void MachineVerifier::markReachable(const MachineBasicBlock *MBB) {
518 BBInfo &MInfo = MBBInfoMap[MBB];
519 if (!MInfo.reachable) {
520 MInfo.reachable = true;
521 for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
522 SuE = MBB->succ_end(); SuI != SuE; ++SuI)
523 markReachable(*SuI);
524 }
525}
526
527void MachineVerifier::visitMachineFunctionBefore() {
528 lastIndex = SlotIndex();
529 regsReserved = MRI->getReservedRegs();
530
531 if (!MF->empty())
532 markReachable(&MF->front());
533
534 // Build a set of the basic blocks in the function.
535 FunctionBlocks.clear();
536 for (const auto &MBB : *MF) {
537 FunctionBlocks.insert(&MBB);
538 BBInfo &MInfo = MBBInfoMap[&MBB];
539
540 MInfo.Preds.insert(MBB.pred_begin(), MBB.pred_end());
541 if (MInfo.Preds.size() != MBB.pred_size())
542 report("MBB has duplicate entries in its predecessor list.", &MBB);
543
544 MInfo.Succs.insert(MBB.succ_begin(), MBB.succ_end());
545 if (MInfo.Succs.size() != MBB.succ_size())
546 report("MBB has duplicate entries in its successor list.", &MBB);
547 }
548
549 // Check that the register use lists are sane.
550 MRI->verifyUseLists();
551
552 if (!MF->empty())
553 verifyStackFrame();
554}
555
556// Does iterator point to a and b as the first two elements?
557static bool matchPair(MachineBasicBlock::const_succ_iterator i,
558 const MachineBasicBlock *a, const MachineBasicBlock *b) {
559 if (*i == a)
560 return *++i == b;
561 if (*i == b)
562 return *++i == a;
563 return false;
564}
565
566void
567MachineVerifier::visitMachineBasicBlockBefore(const MachineBasicBlock *MBB) {
568 FirstTerminator = nullptr;
569
570 if (!MF->getProperties().hasProperty(
571 MachineFunctionProperties::Property::NoPHIs) && MRI->tracksLiveness()) {
572 // If this block has allocatable physical registers live-in, check that
573 // it is an entry block or landing pad.
574 for (const auto &LI : MBB->liveins()) {
575 if (isAllocatable(LI.PhysReg) && !MBB->isEHPad() &&
576 MBB->getIterator() != MBB->getParent()->begin()) {
577 report("MBB has allocatable live-in, but isn't entry or landing-pad.", MBB);
578 }
579 }
580 }
581
582 // Count the number of landing pad successors.
583 SmallPtrSet<MachineBasicBlock*, 4> LandingPadSuccs;
584 for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
585 E = MBB->succ_end(); I != E; ++I) {
586 if ((*I)->isEHPad())
587 LandingPadSuccs.insert(*I);
588 if (!FunctionBlocks.count(*I))
589 report("MBB has successor that isn't part of the function.", MBB);
590 if (!MBBInfoMap[*I].Preds.count(MBB)) {
591 report("Inconsistent CFG", MBB);
592 errs() << "MBB is not in the predecessor list of the successor BB#"
593 << (*I)->getNumber() << ".\n";
594 }
595 }
596
597 // Check the predecessor list.
598 for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
599 E = MBB->pred_end(); I != E; ++I) {
600 if (!FunctionBlocks.count(*I))
601 report("MBB has predecessor that isn't part of the function.", MBB);
602 if (!MBBInfoMap[*I].Succs.count(MBB)) {
603 report("Inconsistent CFG", MBB);
604 errs() << "MBB is not in the successor list of the predecessor BB#"
605 << (*I)->getNumber() << ".\n";
606 }
607 }
608
609 const MCAsmInfo *AsmInfo = TM->getMCAsmInfo();
610 const BasicBlock *BB = MBB->getBasicBlock();
611 const Function *Fn = MF->getFunction();
612 if (LandingPadSuccs.size() > 1 &&
613 !(AsmInfo &&
614 AsmInfo->getExceptionHandlingType() == ExceptionHandling::SjLj &&
615 BB && isa<SwitchInst>(BB->getTerminator())) &&
616 !isFuncletEHPersonality(classifyEHPersonality(Fn->getPersonalityFn())))
617 report("MBB has more than one landing pad successor", MBB);
618
619 // Call AnalyzeBranch. If it succeeds, there are several more conditions to check.
620 MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
621 SmallVector<MachineOperand, 4> Cond;
622 if (!TII->analyzeBranch(*const_cast<MachineBasicBlock *>(MBB), TBB, FBB,
623 Cond)) {
624 // Ok, AnalyzeBranch thinks it knows what's going on with this block. Let's
625 // check whether its answers match up with reality.
626 if (!TBB && !FBB) {
627 // Block falls through to its successor.
628 MachineFunction::const_iterator MBBI = MBB->getIterator();
629 ++MBBI;
630 if (MBBI == MF->end()) {
631 // It's possible that the block legitimately ends with a noreturn
632 // call or an unreachable, in which case it won't actually fall
633 // out the bottom of the function.
634 } else if (MBB->succ_size() == LandingPadSuccs.size()) {
635 // It's possible that the block legitimately ends with a noreturn
636 // call or an unreachable, in which case it won't actually fall
637 // out of the block.
638 } else if (MBB->succ_size() != 1+LandingPadSuccs.size()) {
639 report("MBB exits via unconditional fall-through but doesn't have "
640 "exactly one CFG successor!", MBB);
641 } else if (!MBB->isSuccessor(&*MBBI)) {
642 report("MBB exits via unconditional fall-through but its successor "
643 "differs from its CFG successor!", MBB);
644 }
645 if (!MBB->empty() && MBB->back().isBarrier() &&
646 !TII->isPredicated(MBB->back())) {
647 report("MBB exits via unconditional fall-through but ends with a "
648 "barrier instruction!", MBB);
649 }
650 if (!Cond.empty()) {
651 report("MBB exits via unconditional fall-through but has a condition!",
652 MBB);
653 }
654 } else if (TBB && !FBB && Cond.empty()) {
655 // Block unconditionally branches somewhere.
656 // If the block has exactly one successor, that happens to be a
657 // landingpad, accept it as valid control flow.
658 if (MBB->succ_size() != 1+LandingPadSuccs.size() &&
659 (MBB->succ_size() != 1 || LandingPadSuccs.size() != 1 ||
660 *MBB->succ_begin() != *LandingPadSuccs.begin())) {
661 report("MBB exits via unconditional branch but doesn't have "
662 "exactly one CFG successor!", MBB);
663 } else if (!MBB->isSuccessor(TBB)) {
664 report("MBB exits via unconditional branch but the CFG "
665 "successor doesn't match the actual successor!", MBB);
666 }
667 if (MBB->empty()) {
668 report("MBB exits via unconditional branch but doesn't contain "
669 "any instructions!", MBB);
670 } else if (!MBB->back().isBarrier()) {
671 report("MBB exits via unconditional branch but doesn't end with a "
672 "barrier instruction!", MBB);
673 } else if (!MBB->back().isTerminator()) {
674 report("MBB exits via unconditional branch but the branch isn't a "
675 "terminator instruction!", MBB);
676 }
677 } else if (TBB && !FBB && !Cond.empty()) {
678 // Block conditionally branches somewhere, otherwise falls through.
679 MachineFunction::const_iterator MBBI = MBB->getIterator();
680 ++MBBI;
681 if (MBBI == MF->end()) {
682 report("MBB conditionally falls through out of function!", MBB);
683 } else if (MBB->succ_size() == 1) {
684 // A conditional branch with only one successor is weird, but allowed.
685 if (&*MBBI != TBB)
686 report("MBB exits via conditional branch/fall-through but only has "
687 "one CFG successor!", MBB);
688 else if (TBB != *MBB->succ_begin())
689 report("MBB exits via conditional branch/fall-through but the CFG "
690 "successor don't match the actual successor!", MBB);
691 } else if (MBB->succ_size() != 2) {
692 report("MBB exits via conditional branch/fall-through but doesn't have "
693 "exactly two CFG successors!", MBB);
694 } else if (!matchPair(MBB->succ_begin(), TBB, &*MBBI)) {
695 report("MBB exits via conditional branch/fall-through but the CFG "
696 "successors don't match the actual successors!", MBB);
697 }
698 if (MBB->empty()) {
699 report("MBB exits via conditional branch/fall-through but doesn't "
700 "contain any instructions!", MBB);
701 } else if (MBB->back().isBarrier()) {
702 report("MBB exits via conditional branch/fall-through but ends with a "
703 "barrier instruction!", MBB);
704 } else if (!MBB->back().isTerminator()) {
705 report("MBB exits via conditional branch/fall-through but the branch "
706 "isn't a terminator instruction!", MBB);
707 }
708 } else if (TBB && FBB) {
709 // Block conditionally branches somewhere, otherwise branches
710 // somewhere else.
711 if (MBB->succ_size() == 1) {
712 // A conditional branch with only one successor is weird, but allowed.
713 if (FBB != TBB)
714 report("MBB exits via conditional branch/branch through but only has "
715 "one CFG successor!", MBB);
716 else if (TBB != *MBB->succ_begin())
717 report("MBB exits via conditional branch/branch through but the CFG "
718 "successor don't match the actual successor!", MBB);
719 } else if (MBB->succ_size() != 2) {
720 report("MBB exits via conditional branch/branch but doesn't have "
721 "exactly two CFG successors!", MBB);
722 } else if (!matchPair(MBB->succ_begin(), TBB, FBB)) {
723 report("MBB exits via conditional branch/branch but the CFG "
724 "successors don't match the actual successors!", MBB);
725 }
726 if (MBB->empty()) {
727 report("MBB exits via conditional branch/branch but doesn't "
728 "contain any instructions!", MBB);
729 } else if (!MBB->back().isBarrier()) {
730 report("MBB exits via conditional branch/branch but doesn't end with a "
731 "barrier instruction!", MBB);
732 } else if (!MBB->back().isTerminator()) {
733 report("MBB exits via conditional branch/branch but the branch "
734 "isn't a terminator instruction!", MBB);
735 }
736 if (Cond.empty()) {
737 report("MBB exits via conditinal branch/branch but there's no "
738 "condition!", MBB);
739 }
740 } else {
741 report("AnalyzeBranch returned invalid data!", MBB);
742 }
743 }
744
745 regsLive.clear();
746 if (MRI->tracksLiveness()) {
747 for (const auto &LI : MBB->liveins()) {
748 if (!TargetRegisterInfo::isPhysicalRegister(LI.PhysReg)) {
749 report("MBB live-in list contains non-physical register", MBB);
750 continue;
751 }
752 for (MCSubRegIterator SubRegs(LI.PhysReg, TRI, /*IncludeSelf=*/true);
753 SubRegs.isValid(); ++SubRegs)
754 regsLive.insert(*SubRegs);
755 }
756 }
757 regsLiveInButUnused = regsLive;
758
759 const MachineFrameInfo &MFI = MF->getFrameInfo();
760 BitVector PR = MFI.getPristineRegs(*MF);
761 for (int I = PR.find_first(); I>0; I = PR.find_next(I)) {
762 for (MCSubRegIterator SubRegs(I, TRI, /*IncludeSelf=*/true);
763 SubRegs.isValid(); ++SubRegs)
764 regsLive.insert(*SubRegs);
765 }
766
767 regsKilled.clear();
768 regsDefined.clear();
769
770 if (Indexes)
771 lastIndex = Indexes->getMBBStartIdx(MBB);
772}
773
774// This function gets called for all bundle headers, including normal
775// stand-alone unbundled instructions.
776void MachineVerifier::visitMachineBundleBefore(const MachineInstr *MI) {
777 if (Indexes && Indexes->hasIndex(*MI)) {
778 SlotIndex idx = Indexes->getInstructionIndex(*MI);
779 if (!(idx > lastIndex)) {
780 report("Instruction index out of order", MI);
781 errs() << "Last instruction was at " << lastIndex << '\n';
782 }
783 lastIndex = idx;
784 }
785
786 // Ensure non-terminators don't follow terminators.
787 // Ignore predicated terminators formed by if conversion.
788 // FIXME: If conversion shouldn't need to violate this rule.
789 if (MI->isTerminator() && !TII->isPredicated(*MI)) {
790 if (!FirstTerminator)
791 FirstTerminator = MI;
792 } else if (FirstTerminator) {
793 report("Non-terminator instruction after the first terminator", MI);
794 errs() << "First terminator was:\t" << *FirstTerminator;
795 }
796}
797
798// The operands on an INLINEASM instruction must follow a template.
799// Verify that the flag operands make sense.
800void MachineVerifier::verifyInlineAsm(const MachineInstr *MI) {
801 // The first two operands on INLINEASM are the asm string and global flags.
802 if (MI->getNumOperands() < 2) {
803 report("Too few operands on inline asm", MI);
804 return;
805 }
806 if (!MI->getOperand(0).isSymbol())
807 report("Asm string must be an external symbol", MI);
808 if (!MI->getOperand(1).isImm())
809 report("Asm flags must be an immediate", MI);
810 // Allowed flags are Extra_HasSideEffects = 1, Extra_IsAlignStack = 2,
811 // Extra_AsmDialect = 4, Extra_MayLoad = 8, and Extra_MayStore = 16,
812 // and Extra_IsConvergent = 32.
813 if (!isUInt<6>(MI->getOperand(1).getImm()))
814 report("Unknown asm flags", &MI->getOperand(1), 1);
815
816 static_assert(InlineAsm::MIOp_FirstOperand == 2, "Asm format changed");
817
818 unsigned OpNo = InlineAsm::MIOp_FirstOperand;
819 unsigned NumOps;
820 for (unsigned e = MI->getNumOperands(); OpNo < e; OpNo += NumOps) {
821 const MachineOperand &MO = MI->getOperand(OpNo);
822 // There may be implicit ops after the fixed operands.
823 if (!MO.isImm())
824 break;
825 NumOps = 1 + InlineAsm::getNumOperandRegisters(MO.getImm());
826 }
827
828 if (OpNo > MI->getNumOperands())
829 report("Missing operands in last group", MI);
830
831 // An optional MDNode follows the groups.
832 if (OpNo < MI->getNumOperands() && MI->getOperand(OpNo).isMetadata())
833 ++OpNo;
834
835 // All trailing operands must be implicit registers.
836 for (unsigned e = MI->getNumOperands(); OpNo < e; ++OpNo) {
837 const MachineOperand &MO = MI->getOperand(OpNo);
838 if (!MO.isReg() || !MO.isImplicit())
839 report("Expected implicit register after groups", &MO, OpNo);
840 }
841}
842
843void MachineVerifier::visitMachineInstrBefore(const MachineInstr *MI) {
844 const MCInstrDesc &MCID = MI->getDesc();
845 if (MI->getNumOperands() < MCID.getNumOperands()) {
846 report("Too few operands", MI);
847 errs() << MCID.getNumOperands() << " operands expected, but "
848 << MI->getNumOperands() << " given.\n";
849 }
850
851 if (MI->isPHI() && MF->getProperties().hasProperty(
852 MachineFunctionProperties::Property::NoPHIs))
853 report("Found PHI instruction with NoPHIs property set", MI);
854
855 // Check the tied operands.
856 if (MI->isInlineAsm())
857 verifyInlineAsm(MI);
858
859 // Check the MachineMemOperands for basic consistency.
860 for (MachineInstr::mmo_iterator I = MI->memoperands_begin(),
861 E = MI->memoperands_end(); I != E; ++I) {
862 if ((*I)->isLoad() && !MI->mayLoad())
863 report("Missing mayLoad flag", MI);
864 if ((*I)->isStore() && !MI->mayStore())
865 report("Missing mayStore flag", MI);
866 }
867
868 // Debug values must not have a slot index.
869 // Other instructions must have one, unless they are inside a bundle.
870 if (LiveInts) {
871 bool mapped = !LiveInts->isNotInMIMap(*MI);
872 if (MI->isDebugValue()) {
873 if (mapped)
874 report("Debug instruction has a slot index", MI);
875 } else if (MI->isInsideBundle()) {
876 if (mapped)
877 report("Instruction inside bundle has a slot index", MI);
878 } else {
879 if (!mapped)
880 report("Missing slot index", MI);
881 }
882 }
883
884 // Check types.
885 if (isPreISelGenericOpcode(MCID.getOpcode())) {
886 if (isFunctionSelected)
887 report("Unexpected generic instruction in a Selected function", MI);
888
889 // Generic instructions specify equality constraints between some
890 // of their operands. Make sure these are consistent.
891 SmallVector<LLT, 4> Types;
892 for (unsigned i = 0; i < MCID.getNumOperands(); ++i) {
893 if (!MCID.OpInfo[i].isGenericType())
894 continue;
895 size_t TypeIdx = MCID.OpInfo[i].getGenericTypeIndex();
896 Types.resize(std::max(TypeIdx + 1, Types.size()));
897
898 LLT OpTy = MRI->getType(MI->getOperand(i).getReg());
899 if (Types[TypeIdx].isValid() && Types[TypeIdx] != OpTy)
900 report("type mismatch in generic instruction", MI);
901 Types[TypeIdx] = OpTy;
902 }
903 }
904
905 // Generic opcodes must not have physical register operands.
906 if (isPreISelGenericOpcode(MCID.getOpcode())) {
907 for (auto &Op : MI->operands()) {
908 if (Op.isReg() && TargetRegisterInfo::isPhysicalRegister(Op.getReg()))
909 report("Generic instruction cannot have physical register", MI);
910 }
911 }
912
913 // Generic loads and stores must have a single MachineMemOperand
914 // describing that access.
915 if ((MI->getOpcode() == TargetOpcode::G_LOAD ||
916 MI->getOpcode() == TargetOpcode::G_STORE) &&
917 !MI->hasOneMemOperand())
918 report("Generic instruction accessing memory must have one mem operand",
919 MI);
920
921 StringRef ErrorInfo;
922 if (!TII->verifyInstruction(*MI, ErrorInfo))
923 report(ErrorInfo.data(), MI);
924}
925
926void
927MachineVerifier::visitMachineOperand(const MachineOperand *MO, unsigned MONum) {
928 const MachineInstr *MI = MO->getParent();
929 const MCInstrDesc &MCID = MI->getDesc();
930 unsigned NumDefs = MCID.getNumDefs();
931 if (MCID.getOpcode() == TargetOpcode::PATCHPOINT)
932 NumDefs = (MONum == 0 && MO->isReg()) ? NumDefs : 0;
933
934 // The first MCID.NumDefs operands must be explicit register defines
935 if (MONum < NumDefs) {
936 const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
937 if (!MO->isReg())
938 report("Explicit definition must be a register", MO, MONum);
939 else if (!MO->isDef() && !MCOI.isOptionalDef())
940 report("Explicit definition marked as use", MO, MONum);
941 else if (MO->isImplicit())
942 report("Explicit definition marked as implicit", MO, MONum);
943 } else if (MONum < MCID.getNumOperands()) {
944 const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
945 // Don't check if it's the last operand in a variadic instruction. See,
946 // e.g., LDM_RET in the arm back end.
947 if (MO->isReg() &&
948 !(MI->isVariadic() && MONum == MCID.getNumOperands()-1)) {
949 if (MO->isDef() && !MCOI.isOptionalDef())
950 report("Explicit operand marked as def", MO, MONum);
951 if (MO->isImplicit())
952 report("Explicit operand marked as implicit", MO, MONum);
953 }
954
955 int TiedTo = MCID.getOperandConstraint(MONum, MCOI::TIED_TO);
956 if (TiedTo != -1) {
957 if (!MO->isReg())
958 report("Tied use must be a register", MO, MONum);
959 else if (!MO->isTied())
960 report("Operand should be tied", MO, MONum);
961 else if (unsigned(TiedTo) != MI->findTiedOperandIdx(MONum))
962 report("Tied def doesn't match MCInstrDesc", MO, MONum);
963 } else if (MO->isReg() && MO->isTied())
964 report("Explicit operand should not be tied", MO, MONum);
965 } else {
966 // ARM adds %reg0 operands to indicate predicates. We'll allow that.
967 if (MO->isReg() && !MO->isImplicit() && !MI->isVariadic() && MO->getReg())
968 report("Extra explicit operand on non-variadic instruction", MO, MONum);
969 }
970
971 switch (MO->getType()) {
972 case MachineOperand::MO_Register: {
973 const unsigned Reg = MO->getReg();
974 if (!Reg)
975 return;
976 if (MRI->tracksLiveness() && !MI->isDebugValue())
977 checkLiveness(MO, MONum);
978
979 // Verify the consistency of tied operands.
980 if (MO->isTied()) {
981 unsigned OtherIdx = MI->findTiedOperandIdx(MONum);
982 const MachineOperand &OtherMO = MI->getOperand(OtherIdx);
983 if (!OtherMO.isReg())
984 report("Must be tied to a register", MO, MONum);
985 if (!OtherMO.isTied())
986 report("Missing tie flags on tied operand", MO, MONum);
987 if (MI->findTiedOperandIdx(OtherIdx) != MONum)
988 report("Inconsistent tie links", MO, MONum);
989 if (MONum < MCID.getNumDefs()) {
990 if (OtherIdx < MCID.getNumOperands()) {
991 if (-1 == MCID.getOperandConstraint(OtherIdx, MCOI::TIED_TO))
992 report("Explicit def tied to explicit use without tie constraint",
993 MO, MONum);
994 } else {
995 if (!OtherMO.isImplicit())
996 report("Explicit def should be tied to implicit use", MO, MONum);
997 }
998 }
999 }
1000
1001 // Verify two-address constraints after leaving SSA form.
1002 unsigned DefIdx;
1003 if (!MRI->isSSA() && MO->isUse() &&
1004 MI->isRegTiedToDefOperand(MONum, &DefIdx) &&
1005 Reg != MI->getOperand(DefIdx).getReg())
1006 report("Two-address instruction operands must be identical", MO, MONum);
1007
1008 // Check register classes.
1009 if (MONum < MCID.getNumOperands() && !MO->isImplicit()) {
1010 unsigned SubIdx = MO->getSubReg();
1011
1012 if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
1013 if (SubIdx) {
1014 report("Illegal subregister index for physical register", MO, MONum);
1015 return;
1016 }
1017 if (const TargetRegisterClass *DRC =
1018 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1019 if (!DRC->contains(Reg)) {
1020 report("Illegal physical register for instruction", MO, MONum);
1021 errs() << TRI->getName(Reg) << " is not a "
1022 << TRI->getRegClassName(DRC) << " register.\n";
1023 }
1024 }
1025 } else {
1026 // Virtual register.
1027 const TargetRegisterClass *RC = MRI->getRegClassOrNull(Reg);
1028 if (!RC) {
1029 // This is a generic virtual register.
1030
1031 // If we're post-Select, we can't have gvregs anymore.
1032 if (isFunctionSelected) {
1033 report("Generic virtual register invalid in a Selected function",
1034 MO, MONum);
1035 return;
1036 }
1037
1038 // The gvreg must have a type and it must not have a SubIdx.
1039 LLT Ty = MRI->getType(Reg);
1040 if (!Ty.isValid()) {
1041 report("Generic virtual register must have a valid type", MO,
1042 MONum);
1043 return;
1044 }
1045
1046 const RegisterBank *RegBank = MRI->getRegBankOrNull(Reg);
1047
1048 // If we're post-RegBankSelect, the gvreg must have a bank.
1049 if (!RegBank && isFunctionRegBankSelected) {
1050 report("Generic virtual register must have a bank in a "
1051 "RegBankSelected function",
1052 MO, MONum);
1053 return;
1054 }
1055
1056 // Make sure the register fits into its register bank if any.
1057 if (RegBank && Ty.isValid() &&
1058 RegBank->getSize() < Ty.getSizeInBits()) {
1059 report("Register bank is too small for virtual register", MO,
1060 MONum);
1061 errs() << "Register bank " << RegBank->getName() << " too small("
1062 << RegBank->getSize() << ") to fit " << Ty.getSizeInBits()
1063 << "-bits\n";
1064 return;
1065 }
1066 if (SubIdx) {
1067 report("Generic virtual register does not subregister index", MO,
1068 MONum);
1069 return;
1070 }
1071
1072 // If this is a target specific instruction and this operand
1073 // has register class constraint, the virtual register must
1074 // comply to it.
1075 if (!isPreISelGenericOpcode(MCID.getOpcode()) &&
1076 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1077 report("Virtual register does not match instruction constraint", MO,
1078 MONum);
1079 errs() << "Expect register class "
1080 << TRI->getRegClassName(
1081 TII->getRegClass(MCID, MONum, TRI, *MF))
1082 << " but got nothing\n";
1083 return;
1084 }
1085
1086 break;
1087 }
1088 if (SubIdx) {
1089 const TargetRegisterClass *SRC =
1090 TRI->getSubClassWithSubReg(RC, SubIdx);
1091 if (!SRC) {
1092 report("Invalid subregister index for virtual register", MO, MONum);
1093 errs() << "Register class " << TRI->getRegClassName(RC)
1094 << " does not support subreg index " << SubIdx << "\n";
1095 return;
1096 }
1097 if (RC != SRC) {
1098 report("Invalid register class for subregister index", MO, MONum);
1099 errs() << "Register class " << TRI->getRegClassName(RC)
1100 << " does not fully support subreg index " << SubIdx << "\n";
1101 return;
1102 }
1103 }
1104 if (const TargetRegisterClass *DRC =
1105 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1106 if (SubIdx) {
1107 const TargetRegisterClass *SuperRC =
1108 TRI->getLargestLegalSuperClass(RC, *MF);
1109 if (!SuperRC) {
1110 report("No largest legal super class exists.", MO, MONum);
1111 return;
1112 }
1113 DRC = TRI->getMatchingSuperRegClass(SuperRC, DRC, SubIdx);
1114 if (!DRC) {
1115 report("No matching super-reg register class.", MO, MONum);
1116 return;
1117 }
1118 }
1119 if (!RC->hasSuperClassEq(DRC)) {
1120 report("Illegal virtual register for instruction", MO, MONum);
1121 errs() << "Expected a " << TRI->getRegClassName(DRC)
1122 << " register, but got a " << TRI->getRegClassName(RC)
1123 << " register\n";
1124 }
1125 }
1126 }
1127 }
1128 break;
1129 }
1130
1131 case MachineOperand::MO_RegisterMask:
1132 regMasks.push_back(MO->getRegMask());
1133 break;
1134
1135 case MachineOperand::MO_MachineBasicBlock:
1136 if (MI->isPHI() && !MO->getMBB()->isSuccessor(MI->getParent()))
1137 report("PHI operand is not in the CFG", MO, MONum);
1138 break;
1139
1140 case MachineOperand::MO_FrameIndex:
1141 if (LiveStks && LiveStks->hasInterval(MO->getIndex()) &&
1142 LiveInts && !LiveInts->isNotInMIMap(*MI)) {
1143 int FI = MO->getIndex();
1144 LiveInterval &LI = LiveStks->getInterval(FI);
1145 SlotIndex Idx = LiveInts->getInstructionIndex(*MI);
1146
1147 bool stores = MI->mayStore();
1148 bool loads = MI->mayLoad();
1149 // For a memory-to-memory move, we need to check if the frame
1150 // index is used for storing or loading, by inspecting the
1151 // memory operands.
1152 if (stores && loads) {
1153 for (auto *MMO : MI->memoperands()) {
1154 const PseudoSourceValue *PSV = MMO->getPseudoValue();
1155 if (PSV == nullptr) continue;
1156 const FixedStackPseudoSourceValue *Value =
1157 dyn_cast<FixedStackPseudoSourceValue>(PSV);
1158 if (Value == nullptr) continue;
1159 if (Value->getFrameIndex() != FI) continue;
1160
1161 if (MMO->isStore())
1162 loads = false;
1163 else
1164 stores = false;
1165 break;
1166 }
1167 if (loads == stores)
1168 report("Missing fixed stack memoperand.", MI);
1169 }
1170 if (loads && !LI.liveAt(Idx.getRegSlot(true))) {
1171 report("Instruction loads from dead spill slot", MO, MONum);
1172 errs() << "Live stack: " << LI << '\n';
1173 }
1174 if (stores && !LI.liveAt(Idx.getRegSlot())) {
1175 report("Instruction stores to dead spill slot", MO, MONum);
1176 errs() << "Live stack: " << LI << '\n';
1177 }
1178 }
1179 break;
1180
1181 default:
1182 break;
1183 }
1184}
1185
1186void MachineVerifier::checkLivenessAtUse(const MachineOperand *MO,
1187 unsigned MONum, SlotIndex UseIdx, const LiveRange &LR, unsigned VRegOrUnit,
1188 LaneBitmask LaneMask) {
1189 LiveQueryResult LRQ = LR.Query(UseIdx);
1190 // Check if we have a segment at the use, note however that we only need one
1191 // live subregister range, the others may be dead.
1192 if (!LRQ.valueIn() && LaneMask.none()) {
1193 report("No live segment at use", MO, MONum);
1194 report_context_liverange(LR);
1195 report_context_vreg_regunit(VRegOrUnit);
1196 report_context(UseIdx);
1197 }
1198 if (MO->isKill() && !LRQ.isKill()) {
1199 report("Live range continues after kill flag", MO, MONum);
1200 report_context_liverange(LR);
1201 report_context_vreg_regunit(VRegOrUnit);
1202 if (LaneMask.any())
1203 report_context_lanemask(LaneMask);
1204 report_context(UseIdx);
1205 }
1206}
1207
1208void MachineVerifier::checkLivenessAtDef(const MachineOperand *MO,
1209 unsigned MONum, SlotIndex DefIdx, const LiveRange &LR, unsigned VRegOrUnit,
1210 LaneBitmask LaneMask) {
1211 if (const VNInfo *VNI = LR.getVNInfoAt(DefIdx)) {
1212 assert(VNI && "NULL valno is not allowed");
1213 if (VNI->def != DefIdx) {
1214 report("Inconsistent valno->def", MO, MONum);
1215 report_context_liverange(LR);
1216 report_context_vreg_regunit(VRegOrUnit);
1217 if (LaneMask.any())
1218 report_context_lanemask(LaneMask);
1219 report_context(*VNI);
1220 report_context(DefIdx);
1221 }
1222 } else {
1223 report("No live segment at def", MO, MONum);
1224 report_context_liverange(LR);
1225 report_context_vreg_regunit(VRegOrUnit);
1226 if (LaneMask.any())
1227 report_context_lanemask(LaneMask);
1228 report_context(DefIdx);
1229 }
1230 // Check that, if the dead def flag is present, LiveInts agree.
1231 if (MO->isDead()) {
1232 LiveQueryResult LRQ = LR.Query(DefIdx);
1233 if (!LRQ.isDeadDef()) {
1234 // In case of physregs we can have a non-dead definition on another
1235 // operand.
1236 bool otherDef = false;
1237 if (!TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
1238 const MachineInstr &MI = *MO->getParent();
1239 for (const MachineOperand &MO : MI.operands()) {
1240 if (!MO.isReg() || !MO.isDef() || MO.isDead())
1241 continue;
1242 unsigned Reg = MO.getReg();
1243 for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
1244 if (*Units == VRegOrUnit) {
1245 otherDef = true;
1246 break;
1247 }
1248 }
1249 }
1250 }
1251
1252 if (!otherDef) {
1253 report("Live range continues after dead def flag", MO, MONum);
1254 report_context_liverange(LR);
1255 report_context_vreg_regunit(VRegOrUnit);
1256 if (LaneMask.any())
1257 report_context_lanemask(LaneMask);
1258 }
1259 }
1260 }
1261}
1262
1263void MachineVerifier::checkLiveness(const MachineOperand *MO, unsigned MONum) {
1264 const MachineInstr *MI = MO->getParent();
1265 const unsigned Reg = MO->getReg();
1266
1267 // Both use and def operands can read a register.
1268 if (MO->readsReg()) {
1269 regsLiveInButUnused.erase(Reg);
1270
1271 if (MO->isKill())
1272 addRegWithSubRegs(regsKilled, Reg);
1273
1274 // Check that LiveVars knows this kill.
1275 if (LiveVars && TargetRegisterInfo::isVirtualRegister(Reg) &&
1276 MO->isKill()) {
1277 LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
1278 if (!is_contained(VI.Kills, MI))
1279 report("Kill missing from LiveVariables", MO, MONum);
1280 }
1281
1282 // Check LiveInts liveness and kill.
1283 if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
1284 SlotIndex UseIdx = LiveInts->getInstructionIndex(*MI);
1285 // Check the cached regunit intervals.
1286 if (TargetRegisterInfo::isPhysicalRegister(Reg) && !isReserved(Reg)) {
1287 for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
1288 if (const LiveRange *LR = LiveInts->getCachedRegUnit(*Units))
1289 checkLivenessAtUse(MO, MONum, UseIdx, *LR, *Units);
1290 }
1291 }
1292
1293 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
1294 if (LiveInts->hasInterval(Reg)) {
1295 // This is a virtual register interval.
1296 const LiveInterval &LI = LiveInts->getInterval(Reg);
1297 checkLivenessAtUse(MO, MONum, UseIdx, LI, Reg);
1298
1299 if (LI.hasSubRanges() && !MO->isDef()) {
1300 unsigned SubRegIdx = MO->getSubReg();
1301 LaneBitmask MOMask = SubRegIdx != 0
1302 ? TRI->getSubRegIndexLaneMask(SubRegIdx)
1303 : MRI->getMaxLaneMaskForVReg(Reg);
1304 LaneBitmask LiveInMask;
1305 for (const LiveInterval::SubRange &SR : LI.subranges()) {
1306 if ((MOMask & SR.LaneMask).none())
1307 continue;
1308 checkLivenessAtUse(MO, MONum, UseIdx, SR, Reg, SR.LaneMask);
1309 LiveQueryResult LRQ = SR.Query(UseIdx);
1310 if (LRQ.valueIn())
1311 LiveInMask |= SR.LaneMask;
1312 }
1313 // At least parts of the register have to be live at the use.
1314 if ((LiveInMask & MOMask).none()) {
1315 report("No live subrange at use", MO, MONum);
1316 report_context(LI);
1317 report_context(UseIdx);
1318 }
1319 }
1320 } else {
1321 report("Virtual register has no live interval", MO, MONum);
1322 }
1323 }
1324 }
1325
1326 // Use of a dead register.
1327 if (!regsLive.count(Reg)) {
1328 if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
1329 // Reserved registers may be used even when 'dead'.
1330 bool Bad = !isReserved(Reg);
1331 // We are fine if just any subregister has a defined value.
1332 if (Bad) {
1333 for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid();
1334 ++SubRegs) {
1335 if (regsLive.count(*SubRegs)) {
1336 Bad = false;
1337 break;
1338 }
1339 }
1340 }
1341 // If there is an additional implicit-use of a super register we stop
1342 // here. By definition we are fine if the super register is not
1343 // (completely) dead, if the complete super register is dead we will
1344 // get a report for its operand.
1345 if (Bad) {
1346 for (const MachineOperand &MOP : MI->uses()) {
1347 if (!MOP.isReg())
1348 continue;
1349 if (!MOP.isImplicit())
1350 continue;
1351 for (MCSubRegIterator SubRegs(MOP.getReg(), TRI); SubRegs.isValid();
1352 ++SubRegs) {
1353 if (*SubRegs == Reg) {
1354 Bad = false;
1355 break;
1356 }
1357 }
1358 }
1359 }
1360 if (Bad)
1361 report("Using an undefined physical register", MO, MONum);
1362 } else if (MRI->def_empty(Reg)) {
1363 report("Reading virtual register without a def", MO, MONum);
1364 } else {
1365 BBInfo &MInfo = MBBInfoMap[MI->getParent()];
1366 // We don't know which virtual registers are live in, so only complain
1367 // if vreg was killed in this MBB. Otherwise keep track of vregs that
1368 // must be live in. PHI instructions are handled separately.
1369 if (MInfo.regsKilled.count(Reg))
1370 report("Using a killed virtual register", MO, MONum);
1371 else if (!MI->isPHI())
1372 MInfo.vregsLiveIn.insert(std::make_pair(Reg, MI));
1373 }
1374 }
1375 }
1376
1377 if (MO->isDef()) {
1378 // Register defined.
1379 // TODO: verify that earlyclobber ops are not used.
1380 if (MO->isDead())
1381 addRegWithSubRegs(regsDead, Reg);
1382 else
1383 addRegWithSubRegs(regsDefined, Reg);
1384
1385 // Verify SSA form.
1386 if (MRI->isSSA() && TargetRegisterInfo::isVirtualRegister(Reg) &&
1387 std::next(MRI->def_begin(Reg)) != MRI->def_end())
1388 report("Multiple virtual register defs in SSA form", MO, MONum);
1389
1390 // Check LiveInts for a live segment, but only for virtual registers.
1391 if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
1392 SlotIndex DefIdx = LiveInts->getInstructionIndex(*MI);
1393 DefIdx = DefIdx.getRegSlot(MO->isEarlyClobber());
1394
1395 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
1396 if (LiveInts->hasInterval(Reg)) {
1397 const LiveInterval &LI = LiveInts->getInterval(Reg);
1398 checkLivenessAtDef(MO, MONum, DefIdx, LI, Reg);
1399
1400 if (LI.hasSubRanges()) {
1401 unsigned SubRegIdx = MO->getSubReg();
1402 LaneBitmask MOMask = SubRegIdx != 0
1403 ? TRI->getSubRegIndexLaneMask(SubRegIdx)
1404 : MRI->getMaxLaneMaskForVReg(Reg);
1405 for (const LiveInterval::SubRange &SR : LI.subranges()) {
1406 if ((SR.LaneMask & MOMask).none())
1407 continue;
1408 checkLivenessAtDef(MO, MONum, DefIdx, SR, Reg, SR.LaneMask);
1409 }
1410 }
1411 } else {
1412 report("Virtual register has no Live interval", MO, MONum);
1413 }
1414 }
1415 }
1416 }
1417}
1418
1419void MachineVerifier::visitMachineInstrAfter(const MachineInstr *MI) {
1420}
1421
1422// This function gets called after visiting all instructions in a bundle. The
1423// argument points to the bundle header.
1424// Normal stand-alone instructions are also considered 'bundles', and this
1425// function is called for all of them.
1426void MachineVerifier::visitMachineBundleAfter(const MachineInstr *MI) {
1427 BBInfo &MInfo = MBBInfoMap[MI->getParent()];
1428 set_union(MInfo.regsKilled, regsKilled);
1429 set_subtract(regsLive, regsKilled); regsKilled.clear();
1430 // Kill any masked registers.
1431 while (!regMasks.empty()) {
1432 const uint32_t *Mask = regMasks.pop_back_val();
1433 for (RegSet::iterator I = regsLive.begin(), E = regsLive.end(); I != E; ++I)
1434 if (TargetRegisterInfo::isPhysicalRegister(*I) &&
1435 MachineOperand::clobbersPhysReg(Mask, *I))
1436 regsDead.push_back(*I);
1437 }
1438 set_subtract(regsLive, regsDead); regsDead.clear();
1439 set_union(regsLive, regsDefined); regsDefined.clear();
1440}
1441
1442void
1443MachineVerifier::visitMachineBasicBlockAfter(const MachineBasicBlock *MBB) {
1444 MBBInfoMap[MBB].regsLiveOut = regsLive;
1445 regsLive.clear();
1446
1447 if (Indexes) {
1448 SlotIndex stop = Indexes->getMBBEndIdx(MBB);
1449 if (!(stop > lastIndex)) {
1450 report("Block ends before last instruction index", MBB);
1451 errs() << "Block ends at " << stop
1452 << " last instruction was at " << lastIndex << '\n';
1453 }
1454 lastIndex = stop;
1455 }
1456}
1457
1458// Calculate the largest possible vregsPassed sets. These are the registers that
1459// can pass through an MBB live, but may not be live every time. It is assumed
1460// that all vregsPassed sets are empty before the call.
1461void MachineVerifier::calcRegsPassed() {
1462 // First push live-out regs to successors' vregsPassed. Remember the MBBs that
1463 // have any vregsPassed.
1464 SmallPtrSet<const MachineBasicBlock*, 8> todo;
1465 for (const auto &MBB : *MF) {
1466 BBInfo &MInfo = MBBInfoMap[&MBB];
1467 if (!MInfo.reachable)
1468 continue;
1469 for (MachineBasicBlock::const_succ_iterator SuI = MBB.succ_begin(),
1470 SuE = MBB.succ_end(); SuI != SuE; ++SuI) {
1471 BBInfo &SInfo = MBBInfoMap[*SuI];
1472 if (SInfo.addPassed(MInfo.regsLiveOut))
1473 todo.insert(*SuI);
1474 }
1475 }
1476
1477 // Iteratively push vregsPassed to successors. This will converge to the same
1478 // final state regardless of DenseSet iteration order.
1479 while (!todo.empty()) {
1480 const MachineBasicBlock *MBB = *todo.begin();
1481 todo.erase(MBB);
1482 BBInfo &MInfo = MBBInfoMap[MBB];
1483 for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
1484 SuE = MBB->succ_end(); SuI != SuE; ++SuI) {
1485 if (*SuI == MBB)
1486 continue;
1487 BBInfo &SInfo = MBBInfoMap[*SuI];
1488 if (SInfo.addPassed(MInfo.vregsPassed))
1489 todo.insert(*SuI);
1490 }
1491 }
1492}
1493
1494// Calculate the set of virtual registers that must be passed through each basic
1495// block in order to satisfy the requirements of successor blocks. This is very
1496// similar to calcRegsPassed, only backwards.
1497void MachineVerifier::calcRegsRequired() {
1498 // First push live-in regs to predecessors' vregsRequired.
1499 SmallPtrSet<const MachineBasicBlock*, 8> todo;
1500 for (const auto &MBB : *MF) {
1501 BBInfo &MInfo = MBBInfoMap[&MBB];
1502 for (MachineBasicBlock::const_pred_iterator PrI = MBB.pred_begin(),
1503 PrE = MBB.pred_end(); PrI != PrE; ++PrI) {
1504 BBInfo &PInfo = MBBInfoMap[*PrI];
1505 if (PInfo.addRequired(MInfo.vregsLiveIn))
1506 todo.insert(*PrI);
1507 }
1508 }
1509
1510 // Iteratively push vregsRequired to predecessors. This will converge to the
1511 // same final state regardless of DenseSet iteration order.
1512 while (!todo.empty()) {
1513 const MachineBasicBlock *MBB = *todo.begin();
1514 todo.erase(MBB);
1515 BBInfo &MInfo = MBBInfoMap[MBB];
1516 for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
1517 PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
1518 if (*PrI == MBB)
1519 continue;
1520 BBInfo &SInfo = MBBInfoMap[*PrI];
1521 if (SInfo.addRequired(MInfo.vregsRequired))
1522 todo.insert(*PrI);
1523 }
1524 }
1525}
1526
1527// Check PHI instructions at the beginning of MBB. It is assumed that
1528// calcRegsPassed has been run so BBInfo::isLiveOut is valid.
1529void MachineVerifier::checkPHIOps(const MachineBasicBlock *MBB) {
1530 SmallPtrSet<const MachineBasicBlock*, 8> seen;
1531 for (const auto &BBI : *MBB) {
1532 if (!BBI.isPHI())
1533 break;
1534 seen.clear();
1535
1536 for (unsigned i = 1, e = BBI.getNumOperands(); i != e; i += 2) {
1537 unsigned Reg = BBI.getOperand(i).getReg();
1538 const MachineBasicBlock *Pre = BBI.getOperand(i + 1).getMBB();
1539 if (!Pre->isSuccessor(MBB))
1540 continue;
1541 seen.insert(Pre);
1542 BBInfo &PrInfo = MBBInfoMap[Pre];
1543 if (PrInfo.reachable && !PrInfo.isLiveOut(Reg))
1544 report("PHI operand is not live-out from predecessor",
1545 &BBI.getOperand(i), i);
1546 }
1547
1548 // Did we see all predecessors?
1549 for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
1550 PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
1551 if (!seen.count(*PrI)) {
1552 report("Missing PHI operand", &BBI);
1553 errs() << "BB#" << (*PrI)->getNumber()
1554 << " is a predecessor according to the CFG.\n";
1555 }
1556 }
1557 }
1558}
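
The stride of two in the operand loop above follows the machine-level PHI layout: operand 0 is the register being defined, and the remaining operands come in (incoming value, predecessor block) pairs. As an illustration (register names made up):

  // %vreg2<def> = PHI %vreg0, <BB#1>, %vreg1, <BB#2>
  //   getOperand(0)                -> %vreg2, the def
  //   getOperand(1), getOperand(2) -> %vreg0 and its predecessor BB#1
  //   getOperand(3), getOperand(4) -> %vreg1 and its predecessor BB#2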
1559
1560void MachineVerifier::visitMachineFunctionAfter() {
1561 calcRegsPassed();
1562
1563 for (const auto &MBB : *MF) {
1564 BBInfo &MInfo = MBBInfoMap[&MBB];
1565
1566 // Skip unreachable MBBs.
1567 if (!MInfo.reachable)
1568 continue;
1569
1570 checkPHIOps(&MBB);
1571 }
1572
1573 // Now check liveness info if available
1574 calcRegsRequired();
1575
1576 // Check for killed virtual registers that should be live out.
1577 for (const auto &MBB : *MF) {
1578 BBInfo &MInfo = MBBInfoMap[&MBB];
1579 for (RegSet::iterator
1580 I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end(); I != E;
1581 ++I)
1582 if (MInfo.regsKilled.count(*I)) {
1583 report("Virtual register killed in block, but needed live out.", &MBB);
1584 errs() << "Virtual register " << PrintReg(*I)
1585 << " is used after the block.\n";
1586 }
1587 }
1588
1589 if (!MF->empty()) {
1590 BBInfo &MInfo = MBBInfoMap[&MF->front()];
1591 for (RegSet::iterator
1592 I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end(); I != E;
1593 ++I) {
1594 report("Virtual register defs don't dominate all uses.", MF);
1595 report_context_vreg(*I);
1596 }
1597 }
1598
1599 if (LiveVars)
1600 verifyLiveVariables();
1601 if (LiveInts)
1602 verifyLiveIntervals();
1603}
1604
1605void MachineVerifier::verifyLiveVariables() {
1606 assert(LiveVars && "Don't call verifyLiveVariables without LiveVars");
1607 for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
1608 unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
1609 LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
1610 for (const auto &MBB : *MF) {
1611 BBInfo &MInfo = MBBInfoMap[&MBB];
1612
1613 // Our vregsRequired should be identical to LiveVariables' AliveBlocks
1614 if (MInfo.vregsRequired.count(Reg)) {
1615 if (!VI.AliveBlocks.test(MBB.getNumber())) {
1616 report("LiveVariables: Block missing from AliveBlocks", &MBB);
1617 errs() << "Virtual register " << PrintReg(Reg)
1618 << " must be live through the block.\n";
1619 }
1620 } else {
1621 if (VI.AliveBlocks.test(MBB.getNumber())) {
1622 report("LiveVariables: Block should not be in AliveBlocks", &MBB);
1623 errs() << "Virtual register " << PrintReg(Reg)
1624 << " is not needed live through the block.\n";
1625 }
1626 }
1627 }
1628 }
1629}
1630
1631void MachineVerifier::verifyLiveIntervals() {
1632 assert(LiveInts && "Don't call verifyLiveIntervals without LiveInts");
1633 for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
1634 unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
1635
1636 // Spilling and splitting may leave unused registers around. Skip them.
1637 if (MRI->reg_nodbg_empty(Reg))
1638 continue;
1639
1640 if (!LiveInts->hasInterval(Reg)) {
1641 report("Missing live interval for virtual register", MF);
1642 errs() << PrintReg(Reg, TRI) << " still has defs or uses\n";
1643 continue;
1644 }
1645
1646 const LiveInterval &LI = LiveInts->getInterval(Reg);
1647 assert(Reg == LI.reg && "Invalid reg to interval mapping");
1648 verifyLiveInterval(LI);
1649 }
1650
1651 // Verify all the cached regunit intervals.
1652 for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
1653 if (const LiveRange *LR = LiveInts->getCachedRegUnit(i))
1654 verifyLiveRange(*LR, i);
1655}
1656
1657void MachineVerifier::verifyLiveRangeValue(const LiveRange &LR,
1658 const VNInfo *VNI, unsigned Reg,
1659 LaneBitmask LaneMask) {
1660 if (VNI->isUnused())
1661 return;
1662
1663 const VNInfo *DefVNI = LR.getVNInfoAt(VNI->def);
1664
1665 if (!DefVNI) {
1666 report("Value not live at VNInfo def and not marked unused", MF);
1667 report_context(LR, Reg, LaneMask);
1668 report_context(*VNI);
1669 return;
1670 }
1671
1672 if (DefVNI != VNI) {
1673 report("Live segment at def has different VNInfo", MF);
1674 report_context(LR, Reg, LaneMask);
1675 report_context(*VNI);
1676 return;
1677 }
1678
1679 const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(VNI->def);
1680 if (!MBB) {
1681 report("Invalid VNInfo definition index", MF);
1682 report_context(LR, Reg, LaneMask);
1683 report_context(*VNI);
1684 return;
1685 }
1686
1687 if (VNI->isPHIDef()) {
1688 if (VNI->def != LiveInts->getMBBStartIdx(MBB)) {
1689 report("PHIDef VNInfo is not defined at MBB start", MBB);
1690 report_context(LR, Reg, LaneMask);
1691 report_context(*VNI);
1692 }
1693 return;
1694 }
1695
1696 // Non-PHI def.
1697 const MachineInstr *MI = LiveInts->getInstructionFromIndex(VNI->def);
1698 if (!MI) {
1699 report("No instruction at VNInfo def index", MBB);
1700 report_context(LR, Reg, LaneMask);
1701 report_context(*VNI);
1702 return;
1703 }
1704
1705 if (Reg != 0) {
1706 bool hasDef = false;
1707 bool isEarlyClobber = false;
1708 for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
1709 if (!MOI->isReg() || !MOI->isDef())
1710 continue;
1711 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
1712 if (MOI->getReg() != Reg)
1713 continue;
1714 } else {
1715 if (!TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) ||
1716 !TRI->hasRegUnit(MOI->getReg(), Reg))
1717 continue;
1718 }
1719 if (LaneMask.any() &&
1720 (TRI->getSubRegIndexLaneMask(MOI->getSubReg()) & LaneMask).none())
1721 continue;
1722 hasDef = true;
1723 if (MOI->isEarlyClobber())
1724 isEarlyClobber = true;
1725 }
1726
1727 if (!hasDef) {
1728 report("Defining instruction does not modify register", MI);
1729 report_context(LR, Reg, LaneMask);
1730 report_context(*VNI);
1731 }
1732
1733 // Early clobber defs begin at USE slots, but other defs must begin at
1734 // DEF slots.
1735 if (isEarlyClobber) {
1736 if (!VNI->def.isEarlyClobber()) {
1737 report("Early clobber def must be at an early-clobber slot", MBB);
1738 report_context(LR, Reg, LaneMask);
1739 report_context(*VNI);
1740 }
1741 } else if (!VNI->def.isRegister()) {
1742 report("Non-PHI, non-early clobber def must be at a register slot", MBB);
1743 report_context(LR, Reg, LaneMask);
1744 report_context(*VNI);
1745 }
1746 }
1747}
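
The two slot checks at the end rely on the fact that every instruction index carries several slots: a block-boundary slot, an early-clobber slot, a register slot where ordinary defs become live, and a dead slot. A def's SlotIndex is therefore expected to match one specific slot kind, roughly as in this sketch (assuming the standard SlotIndex accessors; Idx and IsEC are illustrative names):

  // Early-clobber defs live at the early-clobber slot, every other non-PHI def
  // at the register slot of the defining instruction.
  SlotIndex Idx = LiveInts->getInstructionIndex(*MI);
  SlotIndex ExpectedDef = IsEC ? Idx.getRegSlot(/*EC=*/true) : Idx.getRegSlot();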
1748
1749void MachineVerifier::verifyLiveRangeSegment(const LiveRange &LR,
1750 const LiveRange::const_iterator I,
1751 unsigned Reg, LaneBitmask LaneMask)
1752{
1753 const LiveRange::Segment &S = *I;
1754 const VNInfo *VNI = S.valno;
1755 assert(VNI && "Live segment has no valno");
1756
1757 if (VNI->id >= LR.getNumValNums() || VNI != LR.getValNumInfo(VNI->id)) {
1. Assuming the condition is false
2. Taking false branch
1758 report("Foreign valno in live segment", MF);
1759 report_context(LR, Reg, LaneMask);
1760 report_context(S);
1761 report_context(*VNI);
1762 }
1763
1764 if (VNI->isUnused()) {
3. Taking false branch
1765 report("Live segment valno is marked unused", MF);
1766 report_context(LR, Reg, LaneMask);
1767 report_context(S);
1768 }
1769
1770 const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(S.start);
1771 if (!MBB) {
4. Taking false branch
1772 report("Bad start of live segment, no basic block", MF);
1773 report_context(LR, Reg, LaneMask);
1774 report_context(S);
1775 return;
1776 }
1777 SlotIndex MBBStartIdx = LiveInts->getMBBStartIdx(MBB);
1778 if (S.start != MBBStartIdx && S.start != VNI->def) {
1779 report("Live segment must begin at MBB entry or valno def", MBB);
1780 report_context(LR, Reg, LaneMask);
1781 report_context(S);
1782 }
1783
1784 const MachineBasicBlock *EndMBB =
1785 LiveInts->getMBBFromIndex(S.end.getPrevSlot());
1786 if (!EndMBB) {
5. Taking false branch
1787 report("Bad end of live segment, no basic block", MF);
1788 report_context(LR, Reg, LaneMask);
1789 report_context(S);
1790 return;
1791 }
1792
1793 // No more checks for live-out segments.
1794 if (S.end == LiveInts->getMBBEndIdx(EndMBB))
6. Taking false branch
1795 return;
1796
1797 // RegUnit intervals are allowed dead phis.
1798 if (!TargetRegisterInfo::isVirtualRegister(Reg) && VNI->isPHIDef() &&
7. Taking false branch
1799 S.start == VNI->def && S.end == VNI->def.getDeadSlot())
1800 return;
1801
1802 // The live segment is ending inside EndMBB
1803 const MachineInstr *MI =
1804 LiveInts->getInstructionFromIndex(S.end.getPrevSlot());
1805 if (!MI) {
8. Assuming 'MI' is non-null
9. Taking false branch
1806 report("Live segment doesn't end at a valid instruction", EndMBB);
1807 report_context(LR, Reg, LaneMask);
1808 report_context(S);
1809 return;
1810 }
1811
1812 // The block slot must refer to a basic block boundary.
1813 if (S.end.isBlock()) {
10. Taking false branch
1814 report("Live segment ends at B slot of an instruction", EndMBB);
1815 report_context(LR, Reg, LaneMask);
1816 report_context(S);
1817 }
1818
1819 if (S.end.isDead()) {
11. Taking false branch
1820 // Segment ends on the dead slot.
1821 // That means there must be a dead def.
1822 if (!SlotIndex::isSameInstr(S.start, S.end)) {
1823 report("Live segment ending at dead slot spans instructions", EndMBB);
1824 report_context(LR, Reg, LaneMask);
1825 report_context(S);
1826 }
1827 }
1828
1829 // A live segment can only end at an early-clobber slot if it is being
1830 // redefined by an early-clobber def.
1831 if (S.end.isEarlyClobber()) {
12. Taking false branch
1832 if (I+1 == LR.end() || (I+1)->start != S.end) {
1833 report("Live segment ending at early clobber slot must be "
1834 "redefined by an EC def in the same instruction", EndMBB);
1835 report_context(LR, Reg, LaneMask);
1836 report_context(S);
1837 }
1838 }
1839
1840 // The following checks only apply to virtual registers. Physreg liveness
1841 // is too weird to check.
1842 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
13. Taking false branch
1843 // A live segment can end with either a redefinition, a kill flag on a
1844 // use, or a dead flag on a def.
1845 bool hasRead = false;
1846 bool hasSubRegDef = false;
1847 bool hasDeadDef = false;
1848 for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
1849 if (!MOI->isReg() || MOI->getReg() != Reg)
1850 continue;
1851 unsigned Sub = MOI->getSubReg();
1852 LaneBitmask SLM = Sub != 0 ? TRI->getSubRegIndexLaneMask(Sub)
1853 : LaneBitmask::getAll();
1854 if (MOI->isDef()) {
1855 if (Sub != 0) {
1856 hasSubRegDef = true;
1857 // An operand vreg0:sub0<def> reads vreg0:sub1..n. Invert the lane
1858 // mask for subregister defs. Read-undef defs will be handled by
1859 // readsReg below.
1860 SLM = ~SLM;
1861 }
1862 if (MOI->isDead())
1863 hasDeadDef = true;
1864 }
1865 if (LaneMask.any() && (LaneMask & SLM).none())
1866 continue;
1867 if (MOI->readsReg())
1868 hasRead = true;
1869 }
1870 if (S.end.isDead()) {
1871 // Make sure that the corresponding machine operand for a "dead" live
1872 // range has the dead flag. We cannot perform this check for subregister
1873 // liveranges as partially dead values are allowed.
1874 if (LaneMask.none() && !hasDeadDef) {
1875 report("Instruction ending live segment on dead slot has no dead flag",
1876 MI);
1877 report_context(LR, Reg, LaneMask);
1878 report_context(S);
1879 }
1880 } else {
1881 if (!hasRead) {
1882 // When tracking subregister liveness, the main range must start new
1883 // values on partial register writes, even if there is no read.
1884 if (!MRI->shouldTrackSubRegLiveness(Reg) || LaneMask.any() ||
1885 !hasSubRegDef) {
1886 report("Instruction ending live segment doesn't read the register",
1887 MI);
1888 report_context(LR, Reg, LaneMask);
1889 report_context(S);
1890 }
1891 }
1892 }
1893 }
1894
1895 // Now check all the basic blocks in this live segment.
1896 MachineFunction::const_iterator MFI = MBB->getIterator();
1897 // Is this live segment the beginning of a non-PHIDef VN?
1898 if (S.start == VNI->def && !VNI->isPHIDef()) {
1899 // Not live-in to any blocks.
1900 if (MBB == EndMBB)
1901 return;
1902 // Skip this block.
1903 ++MFI;
1904 }
1905 for (;;) {
14. Loop condition is true. Entering loop body
20. Loop condition is true. Entering loop body
26. Loop condition is true. Entering loop body
32. Loop condition is true. Entering loop body
1906 assert(LiveInts->isLiveInToMBB(LR, &*MFI));
1907 // We don't know how to track physregs into a landing pad.
1908 if (!TargetRegisterInfo::isVirtualRegister(Reg) &&
16. Taking false branch
22. Taking false branch
28. Taking false branch
34. Taking false branch
1909 MFI->isEHPad()) {
15. Assuming the condition is false
21. Assuming the condition is false
27. Assuming the condition is false
33. Assuming the condition is false
1910 if (&*MFI == EndMBB)
1911 break;
1912 ++MFI;
1913 continue;
1914 }
1915
1916 // Is VNI a PHI-def in the current block?
1917 bool IsPHI = VNI->isPHIDef() &&
1918 VNI->def == LiveInts->getMBBStartIdx(&*MFI);
1919
1920 // Check that VNI is live-out of all predecessors.
1921 for (MachineBasicBlock::const_pred_iterator PI = MFI->pred_begin(),
17. Loop condition is false. Execution continues on line 1948
23. Loop condition is false. Execution continues on line 1948
29. Loop condition is false. Execution continues on line 1948
35. Loop condition is true. Entering loop body
1922 PE = MFI->pred_end(); PI != PE; ++PI) {
1923 SlotIndex PEnd = LiveInts->getMBBEndIdx(*PI);
1924 const VNInfo *PVNI = LR.getVNInfoBefore(PEnd);
36. 'PVNI' initialized here
1925
1926 // All predecessors must have a live-out value if this is not a
1927 // subregister liverange.
1928 if (!PVNI && LaneMask.none()) {
37. Assuming 'PVNI' is null
38. Taking false branch
1929 report("Register not marked live out of predecessor", *PI);
1930 report_context(LR, Reg, LaneMask);
1931 report_context(*VNI);
1932 errs() << " live into BB#" << MFI->getNumber()
1933 << '@' << LiveInts->getMBBStartIdx(&*MFI) << ", not live before "
1934 << PEnd << '\n';
1935 continue;
1936 }
1937
1938 // Only PHI-defs can take different predecessor values.
1939 if (!IsPHI && PVNI != VNI) {
39. Taking true branch
1940 report("Different value live out of predecessor", *PI);
1941 report_context(LR, Reg, LaneMask);
1942 errs() << "Valno #" << PVNI->id << " live out of BB#"
40. Access to field 'id' results in a dereference of a null pointer (loaded from variable 'PVNI')
1943 << (*PI)->getNumber() << '@' << PEnd << "\nValno #" << VNI->id
1944 << " live into BB#" << MFI->getNumber() << '@'
1945 << LiveInts->getMBBStartIdx(&*MFI) << '\n';
1946 }
1947 }
1948 if (&*MFI == EndMBB)
18. Assuming the condition is false
19. Taking false branch
24. Assuming the condition is false
25. Taking false branch
30. Assuming the condition is false
31. Taking false branch
1949 break;
1950 ++MFI;
1951 }
1952}
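
The analyzer path above (steps 36 through 40) shows how the dereference at line 1942 is reached: PVNI comes back null from getVNInfoBefore, the "Register not marked live out of predecessor" diagnostic is skipped because it only fires when LaneMask is empty, and control then falls into the "Different value live out of predecessor" branch, where PVNI->id is printed. One way the dereference could be avoided, shown here only as a sketch rather than the upstream fix, is to finish with this predecessor as soon as the null case has been handled:

  // Sketch only: handle the missing live-out value and then skip the
  // predecessor, so the PVNI->id print below can never see a null pointer.
  if (!PVNI) {
    if (LaneMask.none()) {
      report("Register not marked live out of predecessor", *PI);
      report_context(LR, Reg, LaneMask);
      report_context(*VNI);
      errs() << " live into BB#" << MFI->getNumber()
             << '@' << LiveInts->getMBBStartIdx(&*MFI) << ", not live before "
             << PEnd << '\n';
    }
    continue;
  }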
1953
1954void MachineVerifier::verifyLiveRange(const LiveRange &LR, unsigned Reg,
1955 LaneBitmask LaneMask) {
1956 for (const VNInfo *VNI : LR.valnos)
1957 verifyLiveRangeValue(LR, VNI, Reg, LaneMask);
1958
1959 for (LiveRange::const_iterator I = LR.begin(), E = LR.end(); I != E; ++I)
1960 verifyLiveRangeSegment(LR, I, Reg, LaneMask);
1961}
1962
1963void MachineVerifier::verifyLiveInterval(const LiveInterval &LI) {
1964 unsigned Reg = LI.reg;
1965 assert(TargetRegisterInfo::isVirtualRegister(Reg));
1966 verifyLiveRange(LI, Reg);
1967
1968 LaneBitmask Mask;
1969 LaneBitmask MaxMask = MRI->getMaxLaneMaskForVReg(Reg);
1970 for (const LiveInterval::SubRange &SR : LI.subranges()) {
1971 if ((Mask & SR.LaneMask).any()) {
1972 report("Lane masks of sub ranges overlap in live interval", MF);
1973 report_context(LI);
1974 }
1975 if ((SR.LaneMask & ~MaxMask).any()) {
1976 report("Subrange lanemask is invalid", MF);
1977 report_context(LI);
1978 }
1979 if (SR.empty()) {
1980 report("Subrange must not be empty", MF);
1981 report_context(SR, LI.reg, SR.LaneMask);
1982 }
1983 Mask |= SR.LaneMask;
1984 verifyLiveRange(SR, LI.reg, SR.LaneMask);
1985 if (!LI.covers(SR)) {
1986 report("A Subrange is not covered by the main range", MF);
1987 report_context(LI);
1988 }
1989 }
1990
1991 // Check the LI only has one connected component.
1992 ConnectedVNInfoEqClasses ConEQ(*LiveInts);
1993 unsigned NumComp = ConEQ.Classify(LI);
1994 if (NumComp > 1) {
1995 report("Multiple connected components in live interval", MF);
1996 report_context(LI);
1997 for (unsigned comp = 0; comp != NumComp; ++comp) {
1998 errs() << comp << ": valnos";
1999 for (LiveInterval::const_vni_iterator I = LI.vni_begin(),
2000 E = LI.vni_end(); I!=E; ++I)
2001 if (comp == ConEQ.getEqClass(*I))
2002 errs() << ' ' << (*I)->id;
2003 errs() << '\n';
2004 }
2005 }
2006}
2007
2008namespace {
2009 // FrameSetup and FrameDestroy can both have a zero adjustment, so a single
2010 // integer cannot tell a FrameSetup from a FrameDestroy when the value is
2011 // zero.
2012 // We therefore use a bool plus an integer to capture the stack state.
2013 struct StackStateOfBB {
2014 StackStateOfBB() : EntryValue(0), ExitValue(0), EntryIsSetup(false),
2015 ExitIsSetup(false) { }
2016 StackStateOfBB(int EntryVal, int ExitVal, bool EntrySetup, bool ExitSetup) :
2017 EntryValue(EntryVal), ExitValue(ExitVal), EntryIsSetup(EntrySetup),
2018 ExitIsSetup(ExitSetup) { }
2019 // Can be negative, which means we are setting up a frame.
2020 int EntryValue;
2021 int ExitValue;
2022 bool EntryIsSetup;
2023 bool ExitIsSetup;
2024 };
2025}
2026
2027/// Make sure on every path through the CFG, a FrameSetup <n> is always followed
2028/// by a FrameDestroy <n>, stack adjustments are identical on all
2029/// CFG edges to a merge point, and frame is destroyed at end of a return block.
2030void MachineVerifier::verifyStackFrame() {
2031 unsigned FrameSetupOpcode = TII->getCallFrameSetupOpcode();
2032 unsigned FrameDestroyOpcode = TII->getCallFrameDestroyOpcode();
2033 if (FrameSetupOpcode == ~0u && FrameDestroyOpcode == ~0u)
2034 return;
2035
2036 SmallVector<StackStateOfBB, 8> SPState;
2037 SPState.resize(MF->getNumBlockIDs());
2038 df_iterator_default_set<const MachineBasicBlock*> Reachable;
2039
2040 // Visit the MBBs in DFS order.
2041 for (df_ext_iterator<const MachineFunction*,
2042 df_iterator_default_set<const MachineBasicBlock*> >
2043 DFI = df_ext_begin(MF, Reachable), DFE = df_ext_end(MF, Reachable);
2044 DFI != DFE; ++DFI) {
2045 const MachineBasicBlock *MBB = *DFI;
2046
2047 StackStateOfBB BBState;
2048 // Check the exit state of the DFS stack predecessor.
2049 if (DFI.getPathLength() >= 2) {
2050 const MachineBasicBlock *StackPred = DFI.getPath(DFI.getPathLength() - 2);
2051      assert(Reachable.count(StackPred) &&
2052             "DFS stack predecessor is already visited.\n");
2053 BBState.EntryValue = SPState[StackPred->getNumber()].ExitValue;
2054 BBState.EntryIsSetup = SPState[StackPred->getNumber()].ExitIsSetup;
2055 BBState.ExitValue = BBState.EntryValue;
2056 BBState.ExitIsSetup = BBState.EntryIsSetup;
2057 }
2058
2059 // Update stack state by checking contents of MBB.
2060 for (const auto &I : *MBB) {
2061 if (I.getOpcode() == FrameSetupOpcode) {
2062 if (BBState.ExitIsSetup)
2063 report("FrameSetup is after another FrameSetup", &I);
2064 BBState.ExitValue -= TII->getFrameSize(I);
2065 BBState.ExitIsSetup = true;
2066 }
2067
2068 if (I.getOpcode() == FrameDestroyOpcode) {
2069 int Size = TII->getFrameSize(I);
2070 if (!BBState.ExitIsSetup)
2071 report("FrameDestroy is not after a FrameSetup", &I);
2072 int AbsSPAdj = BBState.ExitValue < 0 ? -BBState.ExitValue :
2073 BBState.ExitValue;
2074 if (BBState.ExitIsSetup && AbsSPAdj != Size) {
2075 report("FrameDestroy <n> is after FrameSetup <m>", &I);
2076 errs() << "FrameDestroy <" << Size << "> is after FrameSetup <"
2077 << AbsSPAdj << ">.\n";
2078 }
2079 BBState.ExitValue += Size;
2080 BBState.ExitIsSetup = false;
2081 }
2082 }
2083 SPState[MBB->getNumber()] = BBState;
2084
2085 // Make sure the exit state of any predecessor is consistent with the entry
2086 // state.
2087 for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
2088 E = MBB->pred_end(); I != E; ++I) {
2089 if (Reachable.count(*I) &&
2090 (SPState[(*I)->getNumber()].ExitValue != BBState.EntryValue ||
2091 SPState[(*I)->getNumber()].ExitIsSetup != BBState.EntryIsSetup)) {
2092 report("The exit stack state of a predecessor is inconsistent.", MBB);
2093 errs() << "Predecessor BB#" << (*I)->getNumber() << " has exit state ("
2094 << SPState[(*I)->getNumber()].ExitValue << ", "
2095 << SPState[(*I)->getNumber()].ExitIsSetup
2096 << "), while BB#" << MBB->getNumber() << " has entry state ("
2097 << BBState.EntryValue << ", " << BBState.EntryIsSetup << ").\n";
2098 }
2099 }
2100
2101 // Make sure the entry state of any successor is consistent with the exit
2102 // state.
2103 for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
2104 E = MBB->succ_end(); I != E; ++I) {
2105 if (Reachable.count(*I) &&
2106 (SPState[(*I)->getNumber()].EntryValue != BBState.ExitValue ||
2107 SPState[(*I)->getNumber()].EntryIsSetup != BBState.ExitIsSetup)) {
2108 report("The entry stack state of a successor is inconsistent.", MBB);
2109 errs() << "Successor BB#" << (*I)->getNumber() << " has entry state ("
2110 << SPState[(*I)->getNumber()].EntryValue << ", "
2111 << SPState[(*I)->getNumber()].EntryIsSetup
2112 << "), while BB#" << MBB->getNumber() << " has exit state ("
2113 << BBState.ExitValue << ", " << BBState.ExitIsSetup << ").\n";
2114 }
2115 }
2116
2117 // Make sure a basic block with return ends with zero stack adjustment.
2118 if (!MBB->empty() && MBB->back().isReturn()) {
2119 if (BBState.ExitIsSetup)
2120 report("A return block ends with a FrameSetup.", MBB);
2121 if (BBState.ExitValue)
2122 report("A return block ends with a nonzero stack adjustment.", MBB);
2123 }
2124 }
2125}
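
As a concrete illustration of the invariant verifyStackFrame enforces, consider a block that sets up 16 bytes of call-frame space on a target whose call-frame pseudos are ADJCALLSTACKDOWN/ADJCALLSTACKUP (the opcode names vary by target; the numbers here are made up):

  //   ADJCALLSTACKDOWN 16   -> ExitValue -= 16 (now -16), ExitIsSetup = true
  //   CALL @foo
  //   ADJCALLSTACKUP 16     -> ExitValue += 16 (now 0),   ExitIsSetup = false
  // A block ending in a return must finish with ExitValue == 0 and
  // ExitIsSetup == false, and every CFG edge into a merge point must agree on
  // the (value, is-setup) pair.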