Bug Summary

File: lib/CodeGen/MachineVerifier.cpp
Warning: line 1944, column 32
Access to field 'id' results in a dereference of a null pointer (loaded from variable 'PVNI')
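
The statement flagged at line 1944 lies beyond the excerpt reproduced below; the analyzer's claim is that the pointer variable 'PVNI' may be null on some path when its 'id' field is read. As a minimal, hypothetical sketch of the kind of guard such a report usually calls for (the variable names and the call producing PVNI are stand-ins, not the actual code at line 1944):

  // Sketch only: test the value-number pointer before touching PVNI->id.
  // getVNInfoBefore() returns nullptr when no value is live before Pos.
  if (const VNInfo *PVNI = LR.getVNInfoBefore(Pos)) {
    unsigned ValNo = PVNI->id;  // safe: PVNI is known non-null here
    (void)ValNo;
  } else {
    // Diagnose (or skip) instead of dereferencing a null VNInfo.
  }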

Annotated Source Code

1//===-- MachineVerifier.cpp - Machine Code Verifier -----------------------===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// Pass to verify generated machine code. The following is checked:
11//
12// Operand counts: All explicit operands must be present.
13//
14// Register classes: All physical and virtual register operands must be
15// compatible with the register class required by the instruction descriptor.
16//
17// Register live intervals: Registers must be defined only once, and must be
18// defined before use.
19//
20// The machine code verifier is enabled from LLVMTargetMachine.cpp with the
21// command-line option -verify-machineinstrs, or by defining the environment
22// variable LLVM_VERIFY_MACHINEINSTRS to the name of a file that will receive
23// the verifier errors.
24//===----------------------------------------------------------------------===//
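As context for the report below, the verifier can also be scheduled programmatically. A minimal sketch using the entry points declared later in this file (createMachineVerifierPass and MachineFunction::verify); the pass-manager wiring shown here is an assumption, not taken from this file:

  // Sketch: add the verifier to a legacy pass pipeline with a banner that
  // will prefix any error output it produces.
  #include "llvm/CodeGen/Passes.h"
  #include "llvm/IR/LegacyPassManager.h"

  static void addMachineVerifier(llvm::legacy::PassManagerBase &PM) {
    PM.add(llvm::createMachineVerifierPass("After my custom pass"));
  }

  // Alternatively, verify one function in place and abort on errors:
  //   bool OK = MF.verify(/*p=*/nullptr, "Ad-hoc check", /*AbortOnErrors=*/true);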
25
26#include "llvm/CodeGen/Passes.h"
27#include "llvm/ADT/DenseSet.h"
28#include "llvm/ADT/DepthFirstIterator.h"
29#include "llvm/ADT/SetOperations.h"
30#include "llvm/ADT/SmallVector.h"
31#include "llvm/Analysis/EHPersonalities.h"
32#include "llvm/CodeGen/LiveIntervalAnalysis.h"
33#include "llvm/CodeGen/LiveStackAnalysis.h"
34#include "llvm/CodeGen/LiveVariables.h"
35#include "llvm/CodeGen/MachineFrameInfo.h"
36#include "llvm/CodeGen/MachineFunctionPass.h"
37#include "llvm/CodeGen/MachineMemOperand.h"
38#include "llvm/CodeGen/MachineRegisterInfo.h"
39#include "llvm/IR/BasicBlock.h"
40#include "llvm/IR/InlineAsm.h"
41#include "llvm/IR/Instructions.h"
42#include "llvm/MC/MCAsmInfo.h"
43#include "llvm/Support/Debug.h"
44#include "llvm/Support/ErrorHandling.h"
45#include "llvm/Support/FileSystem.h"
46#include "llvm/Support/raw_ostream.h"
47#include "llvm/Target/TargetInstrInfo.h"
48#include "llvm/Target/TargetMachine.h"
49#include "llvm/Target/TargetRegisterInfo.h"
50#include "llvm/Target/TargetSubtargetInfo.h"
51using namespace llvm;
52
53namespace {
54 struct MachineVerifier {
55
56 MachineVerifier(Pass *pass, const char *b) :
57 PASS(pass),
58 Banner(b)
59 {}
60
61 unsigned verify(MachineFunction &MF);
62
63 Pass *const PASS;
64 const char *Banner;
65 const MachineFunction *MF;
66 const TargetMachine *TM;
67 const TargetInstrInfo *TII;
68 const TargetRegisterInfo *TRI;
69 const MachineRegisterInfo *MRI;
70
71 unsigned foundErrors;
72
73 // Avoid querying the MachineFunctionProperties for each operand.
74 bool isFunctionRegBankSelected;
75 bool isFunctionSelected;
76
77 typedef SmallVector<unsigned, 16> RegVector;
78 typedef SmallVector<const uint32_t*, 4> RegMaskVector;
79 typedef DenseSet<unsigned> RegSet;
80 typedef DenseMap<unsigned, const MachineInstr*> RegMap;
81 typedef SmallPtrSet<const MachineBasicBlock*, 8> BlockSet;
82
83 const MachineInstr *FirstTerminator;
84 BlockSet FunctionBlocks;
85
86 BitVector regsReserved;
87 RegSet regsLive;
88 RegVector regsDefined, regsDead, regsKilled;
89 RegMaskVector regMasks;
90 RegSet regsLiveInButUnused;
91
92 SlotIndex lastIndex;
93
94 // Add Reg and any sub-registers to RV
95 void addRegWithSubRegs(RegVector &RV, unsigned Reg) {
96 RV.push_back(Reg);
97 if (TargetRegisterInfo::isPhysicalRegister(Reg))
98 for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
99 RV.push_back(*SubRegs);
100 }
101
102 struct BBInfo {
103 // Is this MBB reachable from the MF entry point?
104 bool reachable;
105
106 // Vregs that must be live in because they are used without being
107 // defined. Map value is the user.
108 RegMap vregsLiveIn;
109
110 // Regs killed in MBB. They may be defined again, and will then be in both
111 // regsKilled and regsLiveOut.
112 RegSet regsKilled;
113
114 // Regs defined in MBB and live out. Note that vregs passing through may
115 // be live out without being mentioned here.
116 RegSet regsLiveOut;
117
118 // Vregs that pass through MBB untouched. This set is disjoint from
119 // regsKilled and regsLiveOut.
120 RegSet vregsPassed;
121
122 // Vregs that must pass through MBB because they are needed by a successor
123 // block. This set is disjoint from regsLiveOut.
124 RegSet vregsRequired;
125
126 // Set versions of block's predecessor and successor lists.
127 BlockSet Preds, Succs;
128
129 BBInfo() : reachable(false) {}
130
131 // Add register to vregsPassed if it belongs there. Return true if
132 // anything changed.
133 bool addPassed(unsigned Reg) {
134 if (!TargetRegisterInfo::isVirtualRegister(Reg))
135 return false;
136 if (regsKilled.count(Reg) || regsLiveOut.count(Reg))
137 return false;
138 return vregsPassed.insert(Reg).second;
139 }
140
141 // Same for a full set.
142 bool addPassed(const RegSet &RS) {
143 bool changed = false;
144 for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
145 if (addPassed(*I))
146 changed = true;
147 return changed;
148 }
149
150 // Add register to vregsRequired if it belongs there. Return true if
151 // anything changed.
152 bool addRequired(unsigned Reg) {
153 if (!TargetRegisterInfo::isVirtualRegister(Reg))
154 return false;
155 if (regsLiveOut.count(Reg))
156 return false;
157 return vregsRequired.insert(Reg).second;
158 }
159
160 // Same for a full set.
161 bool addRequired(const RegSet &RS) {
162 bool changed = false;
163 for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
164 if (addRequired(*I))
165 changed = true;
166 return changed;
167 }
168
169 // Same for a full map.
170 bool addRequired(const RegMap &RM) {
171 bool changed = false;
172 for (RegMap::const_iterator I = RM.begin(), E = RM.end(); I != E; ++I)
173 if (addRequired(I->first))
174 changed = true;
175 return changed;
176 }
177
178 // Live-out registers are either in regsLiveOut or vregsPassed.
179 bool isLiveOut(unsigned Reg) const {
180 return regsLiveOut.count(Reg) || vregsPassed.count(Reg);
181 }
182 };
183
184 // Extra register info per MBB.
185 DenseMap<const MachineBasicBlock*, BBInfo> MBBInfoMap;
186
187 bool isReserved(unsigned Reg) {
188 return Reg < regsReserved.size() && regsReserved.test(Reg);
189 }
190
191 bool isAllocatable(unsigned Reg) const {
192 return Reg < TRI->getNumRegs() && TRI->isInAllocatableClass(Reg) &&
193 !regsReserved.test(Reg);
194 }
195
196 // Analysis information if available
197 LiveVariables *LiveVars;
198 LiveIntervals *LiveInts;
199 LiveStacks *LiveStks;
200 SlotIndexes *Indexes;
201
202 void visitMachineFunctionBefore();
203 void visitMachineBasicBlockBefore(const MachineBasicBlock *MBB);
204 void visitMachineBundleBefore(const MachineInstr *MI);
205 void visitMachineInstrBefore(const MachineInstr *MI);
206 void visitMachineOperand(const MachineOperand *MO, unsigned MONum);
207 void visitMachineInstrAfter(const MachineInstr *MI);
208 void visitMachineBundleAfter(const MachineInstr *MI);
209 void visitMachineBasicBlockAfter(const MachineBasicBlock *MBB);
210 void visitMachineFunctionAfter();
211
212 void report(const char *msg, const MachineFunction *MF);
213 void report(const char *msg, const MachineBasicBlock *MBB);
214 void report(const char *msg, const MachineInstr *MI);
215 void report(const char *msg, const MachineOperand *MO, unsigned MONum);
216
217 void report_context(const LiveInterval &LI) const;
218 void report_context(const LiveRange &LR, unsigned VRegUnit,
219 LaneBitmask LaneMask) const;
220 void report_context(const LiveRange::Segment &S) const;
221 void report_context(const VNInfo &VNI) const;
222 void report_context(SlotIndex Pos) const;
223 void report_context_liverange(const LiveRange &LR) const;
224 void report_context_lanemask(LaneBitmask LaneMask) const;
225 void report_context_vreg(unsigned VReg) const;
226 void report_context_vreg_regunit(unsigned VRegOrRegUnit) const;
227
228 void verifyInlineAsm(const MachineInstr *MI);
229
230 void checkLiveness(const MachineOperand *MO, unsigned MONum);
231 void checkLivenessAtUse(const MachineOperand *MO, unsigned MONum,
232 SlotIndex UseIdx, const LiveRange &LR, unsigned Reg,
233 LaneBitmask LaneMask = LaneBitmask::getNone());
234 void checkLivenessAtDef(const MachineOperand *MO, unsigned MONum,
235 SlotIndex DefIdx, const LiveRange &LR, unsigned Reg,
236 LaneBitmask LaneMask = LaneBitmask::getNone());
237
238 void markReachable(const MachineBasicBlock *MBB);
239 void calcRegsPassed();
240 void checkPHIOps(const MachineBasicBlock *MBB);
241
242 void calcRegsRequired();
243 void verifyLiveVariables();
244 void verifyLiveIntervals();
245 void verifyLiveInterval(const LiveInterval&);
246 void verifyLiveRangeValue(const LiveRange&, const VNInfo*, unsigned,
247 LaneBitmask);
248 void verifyLiveRangeSegment(const LiveRange&,
249 const LiveRange::const_iterator I, unsigned,
250 LaneBitmask);
251 void verifyLiveRange(const LiveRange&, unsigned,
252 LaneBitmask LaneMask = LaneBitmask::getNone());
253
254 void verifyStackFrame();
255
256 void verifySlotIndexes() const;
257 void verifyProperties(const MachineFunction &MF);
258 };
259
260 struct MachineVerifierPass : public MachineFunctionPass {
261 static char ID; // Pass ID, replacement for typeid
262 const std::string Banner;
263
264 MachineVerifierPass(std::string banner = std::string())
265 : MachineFunctionPass(ID), Banner(std::move(banner)) {
266 initializeMachineVerifierPassPass(*PassRegistry::getPassRegistry());
267 }
268
269 void getAnalysisUsage(AnalysisUsage &AU) const override {
270 AU.setPreservesAll();
271 MachineFunctionPass::getAnalysisUsage(AU);
272 }
273
274 bool runOnMachineFunction(MachineFunction &MF) override {
275 unsigned FoundErrors = MachineVerifier(this, Banner.c_str()).verify(MF);
276 if (FoundErrors)
277 report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
278 return false;
279 }
280 };
281
282}
283
284char MachineVerifierPass::ID = 0;
285INITIALIZE_PASS(MachineVerifierPass, "machineverifier",
286 "Verify generated machine code", false, false)
287
288FunctionPass *llvm::createMachineVerifierPass(const std::string &Banner) {
289 return new MachineVerifierPass(Banner);
290}
291
292bool MachineFunction::verify(Pass *p, const char *Banner, bool AbortOnErrors)
293 const {
294 MachineFunction &MF = const_cast<MachineFunction&>(*this);
295 unsigned FoundErrors = MachineVerifier(p, Banner).verify(MF);
296 if (AbortOnErrors && FoundErrors)
297 report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
298 return FoundErrors == 0;
299}
300
301void MachineVerifier::verifySlotIndexes() const {
302 if (Indexes == nullptr)
303 return;
304
305 // Ensure the IdxMBB list is sorted by slot indexes.
306 SlotIndex Last;
307 for (SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin(),
308 E = Indexes->MBBIndexEnd(); I != E; ++I) {
309 assert(!Last.isValid() || I->first > Last);
310 Last = I->first;
311 }
312}
313
314void MachineVerifier::verifyProperties(const MachineFunction &MF) {
315 // If a pass has introduced virtual registers without clearing the
316 // NoVRegs property (or set it without allocating the vregs)
317 // then report an error.
318 if (MF.getProperties().hasProperty(
319 MachineFunctionProperties::Property::NoVRegs) &&
320 MRI->getNumVirtRegs())
321 report("Function has NoVRegs property but there are VReg operands", &MF);
322}
323
324unsigned MachineVerifier::verify(MachineFunction &MF) {
325 foundErrors = 0;
326
327 this->MF = &MF;
328 TM = &MF.getTarget();
329 TII = MF.getSubtarget().getInstrInfo();
330 TRI = MF.getSubtarget().getRegisterInfo();
331 MRI = &MF.getRegInfo();
332
333 isFunctionRegBankSelected = MF.getProperties().hasProperty(
334 MachineFunctionProperties::Property::RegBankSelected);
335 isFunctionSelected = MF.getProperties().hasProperty(
336 MachineFunctionProperties::Property::Selected);
337
338 LiveVars = nullptr;
339 LiveInts = nullptr;
340 LiveStks = nullptr;
341 Indexes = nullptr;
342 if (PASS) {
343 LiveInts = PASS->getAnalysisIfAvailable<LiveIntervals>();
344 // We don't want to verify LiveVariables if LiveIntervals is available.
345 if (!LiveInts)
346 LiveVars = PASS->getAnalysisIfAvailable<LiveVariables>();
347 LiveStks = PASS->getAnalysisIfAvailable<LiveStacks>();
348 Indexes = PASS->getAnalysisIfAvailable<SlotIndexes>();
349 }
350
351 verifySlotIndexes();
352
353 verifyProperties(MF);
354
355 visitMachineFunctionBefore();
356 for (MachineFunction::const_iterator MFI = MF.begin(), MFE = MF.end();
357 MFI!=MFE; ++MFI) {
358 visitMachineBasicBlockBefore(&*MFI);
359 // Keep track of the current bundle header.
360 const MachineInstr *CurBundle = nullptr;
361 // Do we expect the next instruction to be part of the same bundle?
362 bool InBundle = false;
363
364 for (MachineBasicBlock::const_instr_iterator MBBI = MFI->instr_begin(),
365 MBBE = MFI->instr_end(); MBBI != MBBE; ++MBBI) {
366 if (MBBI->getParent() != &*MFI) {
367 report("Bad instruction parent pointer", &*MFI);
368 errs() << "Instruction: " << *MBBI;
369 continue;
370 }
371
372 // Check for consistent bundle flags.
373 if (InBundle && !MBBI->isBundledWithPred())
374 report("Missing BundledPred flag, "
375 "BundledSucc was set on predecessor",
376 &*MBBI);
377 if (!InBundle && MBBI->isBundledWithPred())
378 report("BundledPred flag is set, "
379 "but BundledSucc not set on predecessor",
380 &*MBBI);
381
382 // Is this a bundle header?
383 if (!MBBI->isInsideBundle()) {
384 if (CurBundle)
385 visitMachineBundleAfter(CurBundle);
386 CurBundle = &*MBBI;
387 visitMachineBundleBefore(CurBundle);
388 } else if (!CurBundle)
389 report("No bundle header", &*MBBI);
390 visitMachineInstrBefore(&*MBBI);
391 for (unsigned I = 0, E = MBBI->getNumOperands(); I != E; ++I) {
392 const MachineInstr &MI = *MBBI;
393 const MachineOperand &Op = MI.getOperand(I);
394 if (Op.getParent() != &MI) {
395 // Make sure to use correct addOperand / RemoveOperand / ChangeTo
396 // functions when replacing operands of a MachineInstr.
397 report("Instruction has operand with wrong parent set", &MI);
398 }
399
400 visitMachineOperand(&Op, I);
401 }
402
403 visitMachineInstrAfter(&*MBBI);
404
405 // Was this the last bundled instruction?
406 InBundle = MBBI->isBundledWithSucc();
407 }
408 if (CurBundle)
409 visitMachineBundleAfter(CurBundle);
410 if (InBundle)
411 report("BundledSucc flag set on last instruction in block", &MFI->back());
412 visitMachineBasicBlockAfter(&*MFI);
413 }
414 visitMachineFunctionAfter();
415
416 // Clean up.
417 regsLive.clear();
418 regsDefined.clear();
419 regsDead.clear();
420 regsKilled.clear();
421 regMasks.clear();
422 regsLiveInButUnused.clear();
423 MBBInfoMap.clear();
424
425 return foundErrors;
426}
427
428void MachineVerifier::report(const char *msg, const MachineFunction *MF) {
429 assert(MF);
430 errs() << '\n';
431 if (!foundErrors++) {
432 if (Banner)
433 errs() << "# " << Banner << '\n';
434 if (LiveInts != nullptr)
435 LiveInts->print(errs());
436 else
437 MF->print(errs(), Indexes);
438 }
439 errs() << "*** Bad machine code: " << msg << " ***\n"
440 << "- function: " << MF->getName() << "\n";
441}
442
443void MachineVerifier::report(const char *msg, const MachineBasicBlock *MBB) {
444 assert(MBB);
445 report(msg, MBB->getParent());
446 errs() << "- basic block: BB#" << MBB->getNumber()
447 << ' ' << MBB->getName()
448 << " (" << (const void*)MBB << ')';
449 if (Indexes)
450 errs() << " [" << Indexes->getMBBStartIdx(MBB)
451 << ';' << Indexes->getMBBEndIdx(MBB) << ')';
452 errs() << '\n';
453}
454
455void MachineVerifier::report(const char *msg, const MachineInstr *MI) {
456 assert(MI);
457 report(msg, MI->getParent());
458 errs() << "- instruction: ";
459 if (Indexes && Indexes->hasIndex(*MI))
460 errs() << Indexes->getInstructionIndex(*MI) << '\t';
461 MI->print(errs(), /*SkipOpers=*/true);
462 errs() << '\n';
463}
464
465void MachineVerifier::report(const char *msg,
466 const MachineOperand *MO, unsigned MONum) {
467 assert(MO);
468 report(msg, MO->getParent());
469 errs() << "- operand " << MONum << ": ";
470 MO->print(errs(), TRI);
471 errs() << "\n";
472}
473
474void MachineVerifier::report_context(SlotIndex Pos) const {
475 errs() << "- at: " << Pos << '\n';
476}
477
478void MachineVerifier::report_context(const LiveInterval &LI) const {
479 errs() << "- interval: " << LI << '\n';
480}
481
482void MachineVerifier::report_context(const LiveRange &LR, unsigned VRegUnit,
483 LaneBitmask LaneMask) const {
484 report_context_liverange(LR);
485 report_context_vreg_regunit(VRegUnit);
486 if (LaneMask.any())
487 report_context_lanemask(LaneMask);
488}
489
490void MachineVerifier::report_context(const LiveRange::Segment &S) const {
491 errs() << "- segment: " << S << '\n';
492}
493
494void MachineVerifier::report_context(const VNInfo &VNI) const {
495 errs() << "- ValNo: " << VNI.id << " (def " << VNI.def << ")\n";
496}
497
498void MachineVerifier::report_context_liverange(const LiveRange &LR) const {
499 errs() << "- liverange: " << LR << '\n';
500}
501
502void MachineVerifier::report_context_vreg(unsigned VReg) const {
503 errs() << "- v. register: " << PrintReg(VReg, TRI) << '\n';
504}
505
506void MachineVerifier::report_context_vreg_regunit(unsigned VRegOrUnit) const {
507 if (TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
508 report_context_vreg(VRegOrUnit);
509 } else {
510 errs() << "- regunit: " << PrintRegUnit(VRegOrUnit, TRI) << '\n';
511 }
512}
513
514void MachineVerifier::report_context_lanemask(LaneBitmask LaneMask) const {
515 errs() << "- lanemask: " << PrintLaneMask(LaneMask) << '\n';
516}
517
518void MachineVerifier::markReachable(const MachineBasicBlock *MBB) {
519 BBInfo &MInfo = MBBInfoMap[MBB];
520 if (!MInfo.reachable) {
521 MInfo.reachable = true;
522 for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
523 SuE = MBB->succ_end(); SuI != SuE; ++SuI)
524 markReachable(*SuI);
525 }
526}
527
528void MachineVerifier::visitMachineFunctionBefore() {
529 lastIndex = SlotIndex();
530 regsReserved = MRI->reservedRegsFrozen() ? MRI->getReservedRegs()
531 : TRI->getReservedRegs(*MF);
532
533 if (!MF->empty())
534 markReachable(&MF->front());
535
536 // Build a set of the basic blocks in the function.
537 FunctionBlocks.clear();
538 for (const auto &MBB : *MF) {
539 FunctionBlocks.insert(&MBB);
540 BBInfo &MInfo = MBBInfoMap[&MBB];
541
542 MInfo.Preds.insert(MBB.pred_begin(), MBB.pred_end());
543 if (MInfo.Preds.size() != MBB.pred_size())
544 report("MBB has duplicate entries in its predecessor list.", &MBB);
545
546 MInfo.Succs.insert(MBB.succ_begin(), MBB.succ_end());
547 if (MInfo.Succs.size() != MBB.succ_size())
548 report("MBB has duplicate entries in its successor list.", &MBB);
549 }
550
551 // Check that the register use lists are sane.
552 MRI->verifyUseLists();
553
554 if (!MF->empty())
555 verifyStackFrame();
556}
557
558// Does iterator point to a and b as the first two elements?
559static bool matchPair(MachineBasicBlock::const_succ_iterator i,
560 const MachineBasicBlock *a, const MachineBasicBlock *b) {
561 if (*i == a)
562 return *++i == b;
563 if (*i == b)
564 return *++i == a;
565 return false;
566}
567
568void
569MachineVerifier::visitMachineBasicBlockBefore(const MachineBasicBlock *MBB) {
570 FirstTerminator = nullptr;
571
572 if (!MF->getProperties().hasProperty(
573 MachineFunctionProperties::Property::NoPHIs) && MRI->tracksLiveness()) {
574 // If this block has allocatable physical registers live-in, check that
575 // it is an entry block or landing pad.
576 for (const auto &LI : MBB->liveins()) {
577 if (isAllocatable(LI.PhysReg) && !MBB->isEHPad() &&
578 MBB->getIterator() != MBB->getParent()->begin()) {
579 report("MBB has allocatable live-in, but isn't entry or landing-pad.", MBB);
580 }
581 }
582 }
583
584 // Count the number of landing pad successors.
585 SmallPtrSet<MachineBasicBlock*, 4> LandingPadSuccs;
586 for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
587 E = MBB->succ_end(); I != E; ++I) {
588 if ((*I)->isEHPad())
589 LandingPadSuccs.insert(*I);
590 if (!FunctionBlocks.count(*I))
591 report("MBB has successor that isn't part of the function.", MBB);
592 if (!MBBInfoMap[*I].Preds.count(MBB)) {
593 report("Inconsistent CFG", MBB);
594 errs() << "MBB is not in the predecessor list of the successor BB#"
595 << (*I)->getNumber() << ".\n";
596 }
597 }
598
599 // Check the predecessor list.
600 for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
601 E = MBB->pred_end(); I != E; ++I) {
602 if (!FunctionBlocks.count(*I))
603 report("MBB has predecessor that isn't part of the function.", MBB);
604 if (!MBBInfoMap[*I].Succs.count(MBB)) {
605 report("Inconsistent CFG", MBB);
606 errs() << "MBB is not in the successor list of the predecessor BB#"
607 << (*I)->getNumber() << ".\n";
608 }
609 }
610
611 const MCAsmInfo *AsmInfo = TM->getMCAsmInfo();
612 const BasicBlock *BB = MBB->getBasicBlock();
613 const Function *Fn = MF->getFunction();
614 if (LandingPadSuccs.size() > 1 &&
615 !(AsmInfo &&
616 AsmInfo->getExceptionHandlingType() == ExceptionHandling::SjLj &&
617 BB && isa<SwitchInst>(BB->getTerminator())) &&
618 !isFuncletEHPersonality(classifyEHPersonality(Fn->getPersonalityFn())))
619 report("MBB has more than one landing pad successor", MBB);
620
621 // Call AnalyzeBranch. If it succeeds, there are several more conditions to check.
622 MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
623 SmallVector<MachineOperand, 4> Cond;
624 if (!TII->analyzeBranch(*const_cast<MachineBasicBlock *>(MBB), TBB, FBB,
625 Cond)) {
626 // Ok, AnalyzeBranch thinks it knows what's going on with this block. Let's
627 // check whether its answers match up with reality.
628 if (!TBB && !FBB) {
629 // Block falls through to its successor.
630 MachineFunction::const_iterator MBBI = MBB->getIterator();
631 ++MBBI;
632 if (MBBI == MF->end()) {
633 // It's possible that the block legitimately ends with a noreturn
634 // call or an unreachable, in which case it won't actually fall
635 // out the bottom of the function.
636 } else if (MBB->succ_size() == LandingPadSuccs.size()) {
637 // It's possible that the block legitimately ends with a noreturn
638 // call or an unreachable, in which case it won't actually fall
639 // out of the block.
640 } else if (MBB->succ_size() != 1+LandingPadSuccs.size()) {
641 report("MBB exits via unconditional fall-through but doesn't have "
642 "exactly one CFG successor!", MBB);
643 } else if (!MBB->isSuccessor(&*MBBI)) {
644 report("MBB exits via unconditional fall-through but its successor "
645 "differs from its CFG successor!", MBB);
646 }
647 if (!MBB->empty() && MBB->back().isBarrier() &&
648 !TII->isPredicated(MBB->back())) {
649 report("MBB exits via unconditional fall-through but ends with a "
650 "barrier instruction!", MBB);
651 }
652 if (!Cond.empty()) {
653 report("MBB exits via unconditional fall-through but has a condition!",
654 MBB);
655 }
656 } else if (TBB && !FBB && Cond.empty()) {
657 // Block unconditionally branches somewhere.
658 // If the block has exactly one successor, that happens to be a
659 // landingpad, accept it as valid control flow.
660 if (MBB->succ_size() != 1+LandingPadSuccs.size() &&
661 (MBB->succ_size() != 1 || LandingPadSuccs.size() != 1 ||
662 *MBB->succ_begin() != *LandingPadSuccs.begin())) {
663 report("MBB exits via unconditional branch but doesn't have "
664 "exactly one CFG successor!", MBB);
665 } else if (!MBB->isSuccessor(TBB)) {
666 report("MBB exits via unconditional branch but the CFG "
667 "successor doesn't match the actual successor!", MBB);
668 }
669 if (MBB->empty()) {
670 report("MBB exits via unconditional branch but doesn't contain "
671 "any instructions!", MBB);
672 } else if (!MBB->back().isBarrier()) {
673 report("MBB exits via unconditional branch but doesn't end with a "
674 "barrier instruction!", MBB);
675 } else if (!MBB->back().isTerminator()) {
676 report("MBB exits via unconditional branch but the branch isn't a "
677 "terminator instruction!", MBB);
678 }
679 } else if (TBB && !FBB && !Cond.empty()) {
680 // Block conditionally branches somewhere, otherwise falls through.
681 MachineFunction::const_iterator MBBI = MBB->getIterator();
682 ++MBBI;
683 if (MBBI == MF->end()) {
684 report("MBB conditionally falls through out of function!", MBB);
685 } else if (MBB->succ_size() == 1) {
686 // A conditional branch with only one successor is weird, but allowed.
687 if (&*MBBI != TBB)
688 report("MBB exits via conditional branch/fall-through but only has "
689 "one CFG successor!", MBB);
690 else if (TBB != *MBB->succ_begin())
691 report("MBB exits via conditional branch/fall-through but the CFG "
692 "successor don't match the actual successor!", MBB);
693 } else if (MBB->succ_size() != 2) {
694 report("MBB exits via conditional branch/fall-through but doesn't have "
695 "exactly two CFG successors!", MBB);
696 } else if (!matchPair(MBB->succ_begin(), TBB, &*MBBI)) {
697 report("MBB exits via conditional branch/fall-through but the CFG "
698 "successors don't match the actual successors!", MBB);
699 }
700 if (MBB->empty()) {
701 report("MBB exits via conditional branch/fall-through but doesn't "
702 "contain any instructions!", MBB);
703 } else if (MBB->back().isBarrier()) {
704 report("MBB exits via conditional branch/fall-through but ends with a "
705 "barrier instruction!", MBB);
706 } else if (!MBB->back().isTerminator()) {
707 report("MBB exits via conditional branch/fall-through but the branch "
708 "isn't a terminator instruction!", MBB);
709 }
710 } else if (TBB && FBB) {
711 // Block conditionally branches somewhere, otherwise branches
712 // somewhere else.
713 if (MBB->succ_size() == 1) {
714 // A conditional branch with only one successor is weird, but allowed.
715 if (FBB != TBB)
716 report("MBB exits via conditional branch/branch through but only has "
717 "one CFG successor!", MBB);
718 else if (TBB != *MBB->succ_begin())
719 report("MBB exits via conditional branch/branch through but the CFG "
720 "successor don't match the actual successor!", MBB);
721 } else if (MBB->succ_size() != 2) {
722 report("MBB exits via conditional branch/branch but doesn't have "
723 "exactly two CFG successors!", MBB);
724 } else if (!matchPair(MBB->succ_begin(), TBB, FBB)) {
725 report("MBB exits via conditional branch/branch but the CFG "
726 "successors don't match the actual successors!", MBB);
727 }
728 if (MBB->empty()) {
729 report("MBB exits via conditional branch/branch but doesn't "
730 "contain any instructions!", MBB);
731 } else if (!MBB->back().isBarrier()) {
732 report("MBB exits via conditional branch/branch but doesn't end with a "
733 "barrier instruction!", MBB);
734 } else if (!MBB->back().isTerminator()) {
735 report("MBB exits via conditional branch/branch but the branch "
736 "isn't a terminator instruction!", MBB);
737 }
738 if (Cond.empty()) {
739 report("MBB exits via conditinal branch/branch but there's no "
740 "condition!", MBB);
741 }
742 } else {
743 report("AnalyzeBranch returned invalid data!", MBB);
744 }
745 }
746
747 regsLive.clear();
748 if (MRI->tracksLiveness()) {
749 for (const auto &LI : MBB->liveins()) {
750 if (!TargetRegisterInfo::isPhysicalRegister(LI.PhysReg)) {
751 report("MBB live-in list contains non-physical register", MBB);
752 continue;
753 }
754 for (MCSubRegIterator SubRegs(LI.PhysReg, TRI, /*IncludeSelf=*/true);
755 SubRegs.isValid(); ++SubRegs)
756 regsLive.insert(*SubRegs);
757 }
758 }
759 regsLiveInButUnused = regsLive;
760
761 const MachineFrameInfo &MFI = MF->getFrameInfo();
762 BitVector PR = MFI.getPristineRegs(*MF);
763 for (unsigned I : PR.set_bits()) {
764 for (MCSubRegIterator SubRegs(I, TRI, /*IncludeSelf=*/true);
765 SubRegs.isValid(); ++SubRegs)
766 regsLive.insert(*SubRegs);
767 }
768
769 regsKilled.clear();
770 regsDefined.clear();
771
772 if (Indexes)
773 lastIndex = Indexes->getMBBStartIdx(MBB);
774}
775
776// This function gets called for all bundle headers, including normal
777// stand-alone unbundled instructions.
778void MachineVerifier::visitMachineBundleBefore(const MachineInstr *MI) {
779 if (Indexes && Indexes->hasIndex(*MI)) {
780 SlotIndex idx = Indexes->getInstructionIndex(*MI);
781 if (!(idx > lastIndex)) {
782 report("Instruction index out of order", MI);
783 errs() << "Last instruction was at " << lastIndex << '\n';
784 }
785 lastIndex = idx;
786 }
787
788 // Ensure non-terminators don't follow terminators.
789 // Ignore predicated terminators formed by if conversion.
790 // FIXME: If conversion shouldn't need to violate this rule.
791 if (MI->isTerminator() && !TII->isPredicated(*MI)) {
792 if (!FirstTerminator)
793 FirstTerminator = MI;
794 } else if (FirstTerminator) {
795 report("Non-terminator instruction after the first terminator", MI);
796 errs() << "First terminator was:\t" << *FirstTerminator;
797 }
798}
799
800// The operands on an INLINEASM instruction must follow a template.
801// Verify that the flag operands make sense.
802void MachineVerifier::verifyInlineAsm(const MachineInstr *MI) {
803 // The first two operands on INLINEASM are the asm string and global flags.
804 if (MI->getNumOperands() < 2) {
805 report("Too few operands on inline asm", MI);
806 return;
807 }
808 if (!MI->getOperand(0).isSymbol())
809 report("Asm string must be an external symbol", MI);
810 if (!MI->getOperand(1).isImm())
811 report("Asm flags must be an immediate", MI);
812 // Allowed flags are Extra_HasSideEffects = 1, Extra_IsAlignStack = 2,
813 // Extra_AsmDialect = 4, Extra_MayLoad = 8, and Extra_MayStore = 16,
814 // and Extra_IsConvergent = 32.
815 if (!isUInt<6>(MI->getOperand(1).getImm()))
816 report("Unknown asm flags", &MI->getOperand(1), 1);
817
818 static_assert(InlineAsm::MIOp_FirstOperand == 2, "Asm format changed");
819
820 unsigned OpNo = InlineAsm::MIOp_FirstOperand;
821 unsigned NumOps;
822 for (unsigned e = MI->getNumOperands(); OpNo < e; OpNo += NumOps) {
823 const MachineOperand &MO = MI->getOperand(OpNo);
824 // There may be implicit ops after the fixed operands.
825 if (!MO.isImm())
826 break;
827 NumOps = 1 + InlineAsm::getNumOperandRegisters(MO.getImm());
828 }
829
830 if (OpNo > MI->getNumOperands())
831 report("Missing operands in last group", MI);
832
833 // An optional MDNode follows the groups.
834 if (OpNo < MI->getNumOperands() && MI->getOperand(OpNo).isMetadata())
835 ++OpNo;
836
837 // All trailing operands must be implicit registers.
838 for (unsigned e = MI->getNumOperands(); OpNo < e; ++OpNo) {
839 const MachineOperand &MO = MI->getOperand(OpNo);
840 if (!MO.isReg() || !MO.isImplicit())
841 report("Expected implicit register after groups", &MO, OpNo);
842 }
843}
844
845void MachineVerifier::visitMachineInstrBefore(const MachineInstr *MI) {
846 const MCInstrDesc &MCID = MI->getDesc();
847 if (MI->getNumOperands() < MCID.getNumOperands()) {
848 report("Too few operands", MI);
849 errs() << MCID.getNumOperands() << " operands expected, but "
850 << MI->getNumOperands() << " given.\n";
851 }
852
853 if (MI->isPHI() && MF->getProperties().hasProperty(
854 MachineFunctionProperties::Property::NoPHIs))
855 report("Found PHI instruction with NoPHIs property set", MI);
856
857 // Check the tied operands.
858 if (MI->isInlineAsm())
859 verifyInlineAsm(MI);
860
861 // Check the MachineMemOperands for basic consistency.
862 for (MachineInstr::mmo_iterator I = MI->memoperands_begin(),
863 E = MI->memoperands_end(); I != E; ++I) {
864 if ((*I)->isLoad() && !MI->mayLoad())
865 report("Missing mayLoad flag", MI);
866 if ((*I)->isStore() && !MI->mayStore())
867 report("Missing mayStore flag", MI);
868 }
869
870 // Debug values must not have a slot index.
871 // Other instructions must have one, unless they are inside a bundle.
872 if (LiveInts) {
873 bool mapped = !LiveInts->isNotInMIMap(*MI);
874 if (MI->isDebugValue()) {
875 if (mapped)
876 report("Debug instruction has a slot index", MI);
877 } else if (MI->isInsideBundle()) {
878 if (mapped)
879 report("Instruction inside bundle has a slot index", MI);
880 } else {
881 if (!mapped)
882 report("Missing slot index", MI);
883 }
884 }
885
886 // Check types.
887 if (isPreISelGenericOpcode(MCID.getOpcode())) {
888 if (isFunctionSelected)
889 report("Unexpected generic instruction in a Selected function", MI);
890
891 // Generic instructions specify equality constraints between some
892 // of their operands. Make sure these are consistent.
893 SmallVector<LLT, 4> Types;
894 for (unsigned i = 0; i < MCID.getNumOperands(); ++i) {
895 if (!MCID.OpInfo[i].isGenericType())
896 continue;
897 size_t TypeIdx = MCID.OpInfo[i].getGenericTypeIndex();
898 Types.resize(std::max(TypeIdx + 1, Types.size()));
899
900 LLT OpTy = MRI->getType(MI->getOperand(i).getReg());
901 if (Types[TypeIdx].isValid() && Types[TypeIdx] != OpTy)
902 report("type mismatch in generic instruction", MI);
903 Types[TypeIdx] = OpTy;
904 }
905 }
906
907 // Generic opcodes must not have physical register operands.
908 if (isPreISelGenericOpcode(MCID.getOpcode())) {
909 for (auto &Op : MI->operands()) {
910 if (Op.isReg() && TargetRegisterInfo::isPhysicalRegister(Op.getReg()))
911 report("Generic instruction cannot have physical register", MI);
912 }
913 }
914
915 // Generic loads and stores must have a single MachineMemOperand
916 // describing that access.
917 if ((MI->getOpcode() == TargetOpcode::G_LOAD ||
918 MI->getOpcode() == TargetOpcode::G_STORE) &&
919 !MI->hasOneMemOperand())
920 report("Generic instruction accessing memory must have one mem operand",
921 MI);
922
923 StringRef ErrorInfo;
924 if (!TII->verifyInstruction(*MI, ErrorInfo))
925 report(ErrorInfo.data(), MI);
926}
927
928void
929MachineVerifier::visitMachineOperand(const MachineOperand *MO, unsigned MONum) {
930 const MachineInstr *MI = MO->getParent();
931 const MCInstrDesc &MCID = MI->getDesc();
932 unsigned NumDefs = MCID.getNumDefs();
933 if (MCID.getOpcode() == TargetOpcode::PATCHPOINT)
934 NumDefs = (MONum == 0 && MO->isReg()) ? NumDefs : 0;
935
936 // The first MCID.NumDefs operands must be explicit register defines
937 if (MONum < NumDefs) {
938 const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
939 if (!MO->isReg())
940 report("Explicit definition must be a register", MO, MONum);
941 else if (!MO->isDef() && !MCOI.isOptionalDef())
942 report("Explicit definition marked as use", MO, MONum);
943 else if (MO->isImplicit())
944 report("Explicit definition marked as implicit", MO, MONum);
945 } else if (MONum < MCID.getNumOperands()) {
946 const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
947 // Don't check if it's the last operand in a variadic instruction. See,
948 // e.g., LDM_RET in the arm back end.
949 if (MO->isReg() &&
950 !(MI->isVariadic() && MONum == MCID.getNumOperands()-1)) {
951 if (MO->isDef() && !MCOI.isOptionalDef())
952 report("Explicit operand marked as def", MO, MONum);
953 if (MO->isImplicit())
954 report("Explicit operand marked as implicit", MO, MONum);
955 }
956
957 int TiedTo = MCID.getOperandConstraint(MONum, MCOI::TIED_TO);
958 if (TiedTo != -1) {
959 if (!MO->isReg())
960 report("Tied use must be a register", MO, MONum);
961 else if (!MO->isTied())
962 report("Operand should be tied", MO, MONum);
963 else if (unsigned(TiedTo) != MI->findTiedOperandIdx(MONum))
964 report("Tied def doesn't match MCInstrDesc", MO, MONum);
965 } else if (MO->isReg() && MO->isTied())
966 report("Explicit operand should not be tied", MO, MONum);
967 } else {
968 // ARM adds %reg0 operands to indicate predicates. We'll allow that.
969 if (MO->isReg() && !MO->isImplicit() && !MI->isVariadic() && MO->getReg())
970 report("Extra explicit operand on non-variadic instruction", MO, MONum);
971 }
972
973 switch (MO->getType()) {
974 case MachineOperand::MO_Register: {
975 const unsigned Reg = MO->getReg();
976 if (!Reg)
977 return;
978 if (MRI->tracksLiveness() && !MI->isDebugValue())
979 checkLiveness(MO, MONum);
980
981 // Verify the consistency of tied operands.
982 if (MO->isTied()) {
983 unsigned OtherIdx = MI->findTiedOperandIdx(MONum);
984 const MachineOperand &OtherMO = MI->getOperand(OtherIdx);
985 if (!OtherMO.isReg())
986 report("Must be tied to a register", MO, MONum);
987 if (!OtherMO.isTied())
988 report("Missing tie flags on tied operand", MO, MONum);
989 if (MI->findTiedOperandIdx(OtherIdx) != MONum)
990 report("Inconsistent tie links", MO, MONum);
991 if (MONum < MCID.getNumDefs()) {
992 if (OtherIdx < MCID.getNumOperands()) {
993 if (-1 == MCID.getOperandConstraint(OtherIdx, MCOI::TIED_TO))
994 report("Explicit def tied to explicit use without tie constraint",
995 MO, MONum);
996 } else {
997 if (!OtherMO.isImplicit())
998 report("Explicit def should be tied to implicit use", MO, MONum);
999 }
1000 }
1001 }
1002
1003 // Verify two-address constraints after leaving SSA form.
1004 unsigned DefIdx;
1005 if (!MRI->isSSA() && MO->isUse() &&
1006 MI->isRegTiedToDefOperand(MONum, &DefIdx) &&
1007 Reg != MI->getOperand(DefIdx).getReg())
1008 report("Two-address instruction operands must be identical", MO, MONum);
1009
1010 // Check register classes.
1011 if (MONum < MCID.getNumOperands() && !MO->isImplicit()) {
1012 unsigned SubIdx = MO->getSubReg();
1013
1014 if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
1015 if (SubIdx) {
1016 report("Illegal subregister index for physical register", MO, MONum);
1017 return;
1018 }
1019 if (const TargetRegisterClass *DRC =
1020 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1021 if (!DRC->contains(Reg)) {
1022 report("Illegal physical register for instruction", MO, MONum);
1023 errs() << TRI->getName(Reg) << " is not a "
1024 << TRI->getRegClassName(DRC) << " register.\n";
1025 }
1026 }
1027 } else {
1028 // Virtual register.
1029 const TargetRegisterClass *RC = MRI->getRegClassOrNull(Reg);
1030 if (!RC) {
1031 // This is a generic virtual register.
1032
1033 // If we're post-Select, we can't have gvregs anymore.
1034 if (isFunctionSelected) {
1035 report("Generic virtual register invalid in a Selected function",
1036 MO, MONum);
1037 return;
1038 }
1039
1040 // The gvreg must have a type and it must not have a SubIdx.
1041 LLT Ty = MRI->getType(Reg);
1042 if (!Ty.isValid()) {
1043 report("Generic virtual register must have a valid type", MO,
1044 MONum);
1045 return;
1046 }
1047
1048 const RegisterBank *RegBank = MRI->getRegBankOrNull(Reg);
1049
1050 // If we're post-RegBankSelect, the gvreg must have a bank.
1051 if (!RegBank && isFunctionRegBankSelected) {
1052 report("Generic virtual register must have a bank in a "
1053 "RegBankSelected function",
1054 MO, MONum);
1055 return;
1056 }
1057
1058 // Make sure the register fits into its register bank if any.
1059 if (RegBank && Ty.isValid() &&
1060 RegBank->getSize() < Ty.getSizeInBits()) {
1061 report("Register bank is too small for virtual register", MO,
1062 MONum);
1063 errs() << "Register bank " << RegBank->getName() << " too small("
1064 << RegBank->getSize() << ") to fit " << Ty.getSizeInBits()
1065 << "-bits\n";
1066 return;
1067 }
1068 if (SubIdx) {
1069 report("Generic virtual register does not subregister index", MO,
1070 MONum);
1071 return;
1072 }
1073
1074 // If this is a target specific instruction and this operand
1075 // has register class constraint, the virtual register must
1076 // comply to it.
1077 if (!isPreISelGenericOpcode(MCID.getOpcode()) &&
1078 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1079 report("Virtual register does not match instruction constraint", MO,
1080 MONum);
1081 errs() << "Expect register class "
1082 << TRI->getRegClassName(
1083 TII->getRegClass(MCID, MONum, TRI, *MF))
1084 << " but got nothing\n";
1085 return;
1086 }
1087
1088 break;
1089 }
1090 if (SubIdx) {
1091 const TargetRegisterClass *SRC =
1092 TRI->getSubClassWithSubReg(RC, SubIdx);
1093 if (!SRC) {
1094 report("Invalid subregister index for virtual register", MO, MONum);
1095 errs() << "Register class " << TRI->getRegClassName(RC)
1096 << " does not support subreg index " << SubIdx << "\n";
1097 return;
1098 }
1099 if (RC != SRC) {
1100 report("Invalid register class for subregister index", MO, MONum);
1101 errs() << "Register class " << TRI->getRegClassName(RC)
1102 << " does not fully support subreg index " << SubIdx << "\n";
1103 return;
1104 }
1105 }
1106 if (const TargetRegisterClass *DRC =
1107 TII->getRegClass(MCID, MONum, TRI, *MF)) {
1108 if (SubIdx) {
1109 const TargetRegisterClass *SuperRC =
1110 TRI->getLargestLegalSuperClass(RC, *MF);
1111 if (!SuperRC) {
1112 report("No largest legal super class exists.", MO, MONum);
1113 return;
1114 }
1115 DRC = TRI->getMatchingSuperRegClass(SuperRC, DRC, SubIdx);
1116 if (!DRC) {
1117 report("No matching super-reg register class.", MO, MONum);
1118 return;
1119 }
1120 }
1121 if (!RC->hasSuperClassEq(DRC)) {
1122 report("Illegal virtual register for instruction", MO, MONum);
1123 errs() << "Expected a " << TRI->getRegClassName(DRC)
1124 << " register, but got a " << TRI->getRegClassName(RC)
1125 << " register\n";
1126 }
1127 }
1128 }
1129 }
1130 break;
1131 }
1132
1133 case MachineOperand::MO_RegisterMask:
1134 regMasks.push_back(MO->getRegMask());
1135 break;
1136
1137 case MachineOperand::MO_MachineBasicBlock:
1138 if (MI->isPHI() && !MO->getMBB()->isSuccessor(MI->getParent()))
1139 report("PHI operand is not in the CFG", MO, MONum);
1140 break;
1141
1142 case MachineOperand::MO_FrameIndex:
1143 if (LiveStks && LiveStks->hasInterval(MO->getIndex()) &&
1144 LiveInts && !LiveInts->isNotInMIMap(*MI)) {
1145 int FI = MO->getIndex();
1146 LiveInterval &LI = LiveStks->getInterval(FI);
1147 SlotIndex Idx = LiveInts->getInstructionIndex(*MI);
1148
1149 bool stores = MI->mayStore();
1150 bool loads = MI->mayLoad();
1151 // For a memory-to-memory move, we need to check if the frame
1152 // index is used for storing or loading, by inspecting the
1153 // memory operands.
1154 if (stores && loads) {
1155 for (auto *MMO : MI->memoperands()) {
1156 const PseudoSourceValue *PSV = MMO->getPseudoValue();
1157 if (PSV == nullptr) continue;
1158 const FixedStackPseudoSourceValue *Value =
1159 dyn_cast<FixedStackPseudoSourceValue>(PSV);
1160 if (Value == nullptr) continue;
1161 if (Value->getFrameIndex() != FI) continue;
1162
1163 if (MMO->isStore())
1164 loads = false;
1165 else
1166 stores = false;
1167 break;
1168 }
1169 if (loads == stores)
1170 report("Missing fixed stack memoperand.", MI);
1171 }
1172 if (loads && !LI.liveAt(Idx.getRegSlot(true))) {
1173 report("Instruction loads from dead spill slot", MO, MONum);
1174 errs() << "Live stack: " << LI << '\n';
1175 }
1176 if (stores && !LI.liveAt(Idx.getRegSlot())) {
1177 report("Instruction stores to dead spill slot", MO, MONum);
1178 errs() << "Live stack: " << LI << '\n';
1179 }
1180 }
1181 break;
1182
1183 default:
1184 break;
1185 }
1186}
1187
1188void MachineVerifier::checkLivenessAtUse(const MachineOperand *MO,
1189 unsigned MONum, SlotIndex UseIdx, const LiveRange &LR, unsigned VRegOrUnit,
1190 LaneBitmask LaneMask) {
1191 LiveQueryResult LRQ = LR.Query(UseIdx);
1192 // Check if we have a segment at the use, note however that we only need one
1193 // live subregister range, the others may be dead.
1194 if (!LRQ.valueIn() && LaneMask.none()) {
1195 report("No live segment at use", MO, MONum);
1196 report_context_liverange(LR);
1197 report_context_vreg_regunit(VRegOrUnit);
1198 report_context(UseIdx);
1199 }
1200 if (MO->isKill() && !LRQ.isKill()) {
1201 report("Live range continues after kill flag", MO, MONum);
1202 report_context_liverange(LR);
1203 report_context_vreg_regunit(VRegOrUnit);
1204 if (LaneMask.any())
1205 report_context_lanemask(LaneMask);
1206 report_context(UseIdx);
1207 }
1208}
1209
1210void MachineVerifier::checkLivenessAtDef(const MachineOperand *MO,
1211 unsigned MONum, SlotIndex DefIdx, const LiveRange &LR, unsigned VRegOrUnit,
1212 LaneBitmask LaneMask) {
1213 if (const VNInfo *VNI = LR.getVNInfoAt(DefIdx)) {
1214 assert(VNI && "NULL valno is not allowed");
1215 if (VNI->def != DefIdx) {
1216 report("Inconsistent valno->def", MO, MONum);
1217 report_context_liverange(LR);
1218 report_context_vreg_regunit(VRegOrUnit);
1219 if (LaneMask.any())
1220 report_context_lanemask(LaneMask);
1221 report_context(*VNI);
1222 report_context(DefIdx);
1223 }
1224 } else {
1225 report("No live segment at def", MO, MONum);
1226 report_context_liverange(LR);
1227 report_context_vreg_regunit(VRegOrUnit);
1228 if (LaneMask.any())
1229 report_context_lanemask(LaneMask);
1230 report_context(DefIdx);
1231 }
1232 // Check that, if the dead def flag is present, LiveInts agree.
1233 if (MO->isDead()) {
1234 LiveQueryResult LRQ = LR.Query(DefIdx);
1235 if (!LRQ.isDeadDef()) {
1236 // In case of physregs we can have a non-dead definition on another
1237 // operand.
1238 bool otherDef = false;
1239 if (!TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
1240 const MachineInstr &MI = *MO->getParent();
1241 for (const MachineOperand &MO : MI.operands()) {
1242 if (!MO.isReg() || !MO.isDef() || MO.isDead())
1243 continue;
1244 unsigned Reg = MO.getReg();
1245 for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
1246 if (*Units == VRegOrUnit) {
1247 otherDef = true;
1248 break;
1249 }
1250 }
1251 }
1252 }
1253
1254 if (!otherDef) {
1255 report("Live range continues after dead def flag", MO, MONum);
1256 report_context_liverange(LR);
1257 report_context_vreg_regunit(VRegOrUnit);
1258 if (LaneMask.any())
1259 report_context_lanemask(LaneMask);
1260 }
1261 }
1262 }
1263}
1264
1265void MachineVerifier::checkLiveness(const MachineOperand *MO, unsigned MONum) {
1266 const MachineInstr *MI = MO->getParent();
1267 const unsigned Reg = MO->getReg();
1268
1269 // Both use and def operands can read a register.
1270 if (MO->readsReg()) {
1271 regsLiveInButUnused.erase(Reg);
1272
1273 if (MO->isKill())
1274 addRegWithSubRegs(regsKilled, Reg);
1275
1276 // Check that LiveVars knows this kill.
1277 if (LiveVars && TargetRegisterInfo::isVirtualRegister(Reg) &&
1278 MO->isKill()) {
1279 LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
1280 if (!is_contained(VI.Kills, MI))
1281 report("Kill missing from LiveVariables", MO, MONum);
1282 }
1283
1284 // Check LiveInts liveness and kill.
1285 if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
1286 SlotIndex UseIdx = LiveInts->getInstructionIndex(*MI);
1287 // Check the cached regunit intervals.
1288 if (TargetRegisterInfo::isPhysicalRegister(Reg) && !isReserved(Reg)) {
1289 for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
1290 if (const LiveRange *LR = LiveInts->getCachedRegUnit(*Units))
1291 checkLivenessAtUse(MO, MONum, UseIdx, *LR, *Units);
1292 }
1293 }
1294
1295 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
1296 if (LiveInts->hasInterval(Reg)) {
1297 // This is a virtual register interval.
1298 const LiveInterval &LI = LiveInts->getInterval(Reg);
1299 checkLivenessAtUse(MO, MONum, UseIdx, LI, Reg);
1300
1301 if (LI.hasSubRanges() && !MO->isDef()) {
1302 unsigned SubRegIdx = MO->getSubReg();
1303 LaneBitmask MOMask = SubRegIdx != 0
1304 ? TRI->getSubRegIndexLaneMask(SubRegIdx)
1305 : MRI->getMaxLaneMaskForVReg(Reg);
1306 LaneBitmask LiveInMask;
1307 for (const LiveInterval::SubRange &SR : LI.subranges()) {
1308 if ((MOMask & SR.LaneMask).none())
1309 continue;
1310 checkLivenessAtUse(MO, MONum, UseIdx, SR, Reg, SR.LaneMask);
1311 LiveQueryResult LRQ = SR.Query(UseIdx);
1312 if (LRQ.valueIn())
1313 LiveInMask |= SR.LaneMask;
1314 }
1315 // At least parts of the register have to be live at the use.
1316 if ((LiveInMask & MOMask).none()) {
1317 report("No live subrange at use", MO, MONum);
1318 report_context(LI);
1319 report_context(UseIdx);
1320 }
1321 }
1322 } else {
1323 report("Virtual register has no live interval", MO, MONum);
1324 }
1325 }
1326 }
1327
1328 // Use of a dead register.
1329 if (!regsLive.count(Reg)) {
1330 if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
1331 // Reserved registers may be used even when 'dead'.
1332 bool Bad = !isReserved(Reg);
1333 // We are fine if just any subregister has a defined value.
1334 if (Bad) {
1335 for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid();
1336 ++SubRegs) {
1337 if (regsLive.count(*SubRegs)) {
1338 Bad = false;
1339 break;
1340 }
1341 }
1342 }
1343 // If there is an additional implicit-use of a super register we stop
1344 // here. By definition we are fine if the super register is not
1345 // (completely) dead, if the complete super register is dead we will
1346 // get a report for its operand.
1347 if (Bad) {
1348 for (const MachineOperand &MOP : MI->uses()) {
1349 if (!MOP.isReg())
1350 continue;
1351 if (!MOP.isImplicit())
1352 continue;
1353 for (MCSubRegIterator SubRegs(MOP.getReg(), TRI); SubRegs.isValid();
1354 ++SubRegs) {
1355 if (*SubRegs == Reg) {
1356 Bad = false;
1357 break;
1358 }
1359 }
1360 }
1361 }
1362 if (Bad)
1363 report("Using an undefined physical register", MO, MONum);
1364 } else if (MRI->def_empty(Reg)) {
1365 report("Reading virtual register without a def", MO, MONum);
1366 } else {
1367 BBInfo &MInfo = MBBInfoMap[MI->getParent()];
1368 // We don't know which virtual registers are live in, so only complain
1369 // if vreg was killed in this MBB. Otherwise keep track of vregs that
1370 // must be live in. PHI instructions are handled separately.
1371 if (MInfo.regsKilled.count(Reg))
1372 report("Using a killed virtual register", MO, MONum);
1373 else if (!MI->isPHI())
1374 MInfo.vregsLiveIn.insert(std::make_pair(Reg, MI));
1375 }
1376 }
1377 }
1378
1379 if (MO->isDef()) {
1380 // Register defined.
1381 // TODO: verify that earlyclobber ops are not used.
1382 if (MO->isDead())
1383 addRegWithSubRegs(regsDead, Reg);
1384 else
1385 addRegWithSubRegs(regsDefined, Reg);
1386
1387 // Verify SSA form.
1388 if (MRI->isSSA() && TargetRegisterInfo::isVirtualRegister(Reg) &&
1389 std::next(MRI->def_begin(Reg)) != MRI->def_end())
1390 report("Multiple virtual register defs in SSA form", MO, MONum);
1391
1392 // Check LiveInts for a live segment, but only for virtual registers.
1393 if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
1394 SlotIndex DefIdx = LiveInts->getInstructionIndex(*MI);
1395 DefIdx = DefIdx.getRegSlot(MO->isEarlyClobber());
1396
1397 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
1398 if (LiveInts->hasInterval(Reg)) {
1399 const LiveInterval &LI = LiveInts->getInterval(Reg);
1400 checkLivenessAtDef(MO, MONum, DefIdx, LI, Reg);
1401
1402 if (LI.hasSubRanges()) {
1403 unsigned SubRegIdx = MO->getSubReg();
1404 LaneBitmask MOMask = SubRegIdx != 0
1405 ? TRI->getSubRegIndexLaneMask(SubRegIdx)
1406 : MRI->getMaxLaneMaskForVReg(Reg);
1407 for (const LiveInterval::SubRange &SR : LI.subranges()) {
1408 if ((SR.LaneMask & MOMask).none())
1409 continue;
1410 checkLivenessAtDef(MO, MONum, DefIdx, SR, Reg, SR.LaneMask);
1411 }
1412 }
1413 } else {
1414 report("Virtual register has no Live interval", MO, MONum);
1415 }
1416 }
1417 }
1418 }
1419}
1420
1421void MachineVerifier::visitMachineInstrAfter(const MachineInstr *MI) {
1422}
1423
1424// This function gets called after visiting all instructions in a bundle. The
1425// argument points to the bundle header.
1426// Normal stand-alone instructions are also considered 'bundles', and this
1427// function is called for all of them.
1428void MachineVerifier::visitMachineBundleAfter(const MachineInstr *MI) {
1429 BBInfo &MInfo = MBBInfoMap[MI->getParent()];
1430 set_union(MInfo.regsKilled, regsKilled);
1431 set_subtract(regsLive, regsKilled); regsKilled.clear();
1432 // Kill any masked registers.
1433 while (!regMasks.empty()) {
1434 const uint32_t *Mask = regMasks.pop_back_val();
1435 for (RegSet::iterator I = regsLive.begin(), E = regsLive.end(); I != E; ++I)
1436 if (TargetRegisterInfo::isPhysicalRegister(*I) &&
1437 MachineOperand::clobbersPhysReg(Mask, *I))
1438 regsDead.push_back(*I);
1439 }
1440 set_subtract(regsLive, regsDead); regsDead.clear();
1441 set_union(regsLive, regsDefined); regsDefined.clear();
1442}
1443
1444void
1445MachineVerifier::visitMachineBasicBlockAfter(const MachineBasicBlock *MBB) {
1446 MBBInfoMap[MBB].regsLiveOut = regsLive;
1447 regsLive.clear();
1448
1449 if (Indexes) {
1450 SlotIndex stop = Indexes->getMBBEndIdx(MBB);
1451 if (!(stop > lastIndex)) {
1452 report("Block ends before last instruction index", MBB);
1453 errs() << "Block ends at " << stop
1454 << " last instruction was at " << lastIndex << '\n';
1455 }
1456 lastIndex = stop;
1457 }
1458}
1459
1460// Calculate the largest possible vregsPassed sets. These are the registers that
1461// can pass through an MBB live, but may not be live every time. It is assumed
1462// that all vregsPassed sets are empty before the call.
1463void MachineVerifier::calcRegsPassed() {
1464 // First push live-out regs to successors' vregsPassed. Remember the MBBs that
1465 // have any vregsPassed.
1466 SmallPtrSet<const MachineBasicBlock*, 8> todo;
1467 for (const auto &MBB : *MF) {
1468 BBInfo &MInfo = MBBInfoMap[&MBB];
1469 if (!MInfo.reachable)
1470 continue;
1471 for (MachineBasicBlock::const_succ_iterator SuI = MBB.succ_begin(),
1472 SuE = MBB.succ_end(); SuI != SuE; ++SuI) {
1473 BBInfo &SInfo = MBBInfoMap[*SuI];
1474 if (SInfo.addPassed(MInfo.regsLiveOut))
1475 todo.insert(*SuI);
1476 }
1477 }
1478
1479 // Iteratively push vregsPassed to successors. This will converge to the same
1480 // final state regardless of DenseSet iteration order.
1481 while (!todo.empty()) {
1482 const MachineBasicBlock *MBB = *todo.begin();
1483 todo.erase(MBB);
1484 BBInfo &MInfo = MBBInfoMap[MBB];
1485 for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
1486 SuE = MBB->succ_end(); SuI != SuE; ++SuI) {
1487 if (*SuI == MBB)
1488 continue;
1489 BBInfo &SInfo = MBBInfoMap[*SuI];
1490 if (SInfo.addPassed(MInfo.vregsPassed))
1491 todo.insert(*SuI);
1492 }
1493 }
1494}
1495
1496// Calculate the set of virtual registers that must be passed through each basic
1497// block in order to satisfy the requirements of successor blocks. This is very
1498// similar to calcRegsPassed, only backwards.
1499void MachineVerifier::calcRegsRequired() {
1500 // First push live-in regs to predecessors' vregsRequired.
1501 SmallPtrSet<const MachineBasicBlock*, 8> todo;
1502 for (const auto &MBB : *MF) {
1503 BBInfo &MInfo = MBBInfoMap[&MBB];
1504 for (MachineBasicBlock::const_pred_iterator PrI = MBB.pred_begin(),
1505 PrE = MBB.pred_end(); PrI != PrE; ++PrI) {
1506 BBInfo &PInfo = MBBInfoMap[*PrI];
1507 if (PInfo.addRequired(MInfo.vregsLiveIn))
1508 todo.insert(*PrI);
1509 }
1510 }
1511
1512 // Iteratively push vregsRequired to predecessors. This will converge to the
1513 // same final state regardless of DenseSet iteration order.
1514 while (!todo.empty()) {
1515 const MachineBasicBlock *MBB = *todo.begin();
1516 todo.erase(MBB);
1517 BBInfo &MInfo = MBBInfoMap[MBB];
1518 for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
1519 PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
1520 if (*PrI == MBB)
1521 continue;
1522 BBInfo &SInfo = MBBInfoMap[*PrI];
1523 if (SInfo.addRequired(MInfo.vregsRequired))
1524 todo.insert(*PrI);
1525 }
1526 }
1527}
1528
1529// Check PHI instructions at the beginning of MBB. It is assumed that
1530// calcRegsPassed has been run so BBInfo::isLiveOut is valid.
1531void MachineVerifier::checkPHIOps(const MachineBasicBlock *MBB) {
1532 SmallPtrSet<const MachineBasicBlock*, 8> seen;
1533 for (const auto &BBI : *MBB) {
1534 if (!BBI.isPHI())
1535 break;
1536 seen.clear();
1537
1538 for (unsigned i = 1, e = BBI.getNumOperands(); i != e; i += 2) {
1539 unsigned Reg = BBI.getOperand(i).getReg();
1540 const MachineBasicBlock *Pre = BBI.getOperand(i + 1).getMBB();
1541 if (!Pre->isSuccessor(MBB))
1542 continue;
1543 seen.insert(Pre);
1544 BBInfo &PrInfo = MBBInfoMap[Pre];
1545 if (PrInfo.reachable && !PrInfo.isLiveOut(Reg))
1546 report("PHI operand is not live-out from predecessor",
1547 &BBI.getOperand(i), i);
1548 }
1549
1550 // Did we see all predecessors?
1551 for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
1552 PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
1553 if (!seen.count(*PrI)) {
1554 report("Missing PHI operand", &BBI);
1555 errs() << "BB#" << (*PrI)->getNumber()
1556 << " is a predecessor according to the CFG.\n";
1557 }
1558 }
1559 }
1560}
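// ---- Editor's note (not part of the original MachineVerifier.cpp) ---------
// The "i += 2" walk in checkPHIOps relies on the machine-level PHI layout:
// operand 0 is the def, followed by (value register, predecessor block)
// pairs. A well-formed PHI, written in the debug-print style of this LLVM
// vintage (register and block numbers are made up for illustration):
//
//   %vreg2<def> = PHI %vreg0, <BB#1>, %vreg1, <BB#2>
//
// The check reports an error if %vreg0 is not live-out of BB#1 (likewise for
// %vreg1 and BB#2), and a separate error if some CFG predecessor of the block
// has no corresponding (register, block) pair at all.
// ---------------------------------------------------------------------------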
1561
1562void MachineVerifier::visitMachineFunctionAfter() {
1563 calcRegsPassed();
1564
1565 for (const auto &MBB : *MF) {
1566 BBInfo &MInfo = MBBInfoMap[&MBB];
1567
1568 // Skip unreachable MBBs.
1569 if (!MInfo.reachable)
1570 continue;
1571
1572 checkPHIOps(&MBB);
1573 }
1574
1575 // Now check liveness info if available
1576 calcRegsRequired();
1577
1578 // Check for killed virtual registers that should be live out.
1579 for (const auto &MBB : *MF) {
1580 BBInfo &MInfo = MBBInfoMap[&MBB];
1581 for (RegSet::iterator
1582 I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end(); I != E;
1583 ++I)
1584 if (MInfo.regsKilled.count(*I)) {
1585 report("Virtual register killed in block, but needed live out.", &MBB);
1586 errs() << "Virtual register " << PrintReg(*I)
1587 << " is used after the block.\n";
1588 }
1589 }
1590
1591 if (!MF->empty()) {
1592 BBInfo &MInfo = MBBInfoMap[&MF->front()];
1593 for (RegSet::iterator
1594 I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end(); I != E;
1595 ++I) {
1596 report("Virtual register defs don't dominate all uses.", MF);
1597 report_context_vreg(*I);
1598 }
1599 }
1600
1601 if (LiveVars)
1602 verifyLiveVariables();
1603 if (LiveInts)
1604 verifyLiveIntervals();
1605}
1606
1607void MachineVerifier::verifyLiveVariables() {
1608 assert(LiveVars && "Don't call verifyLiveVariables without LiveVars");
1609 for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
1610 unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
1611 LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
1612 for (const auto &MBB : *MF) {
1613 BBInfo &MInfo = MBBInfoMap[&MBB];
1614
1615 // Our vregsRequired should be identical to LiveVariables' AliveBlocks
1616 if (MInfo.vregsRequired.count(Reg)) {
1617 if (!VI.AliveBlocks.test(MBB.getNumber())) {
1618 report("LiveVariables: Block missing from AliveBlocks", &MBB);
1619 errs() << "Virtual register " << PrintReg(Reg)
1620 << " must be live through the block.\n";
1621 }
1622 } else {
1623 if (VI.AliveBlocks.test(MBB.getNumber())) {
1624 report("LiveVariables: Block should not be in AliveBlocks", &MBB);
1625 errs() << "Virtual register " << PrintReg(Reg)
1626 << " is not needed live through the block.\n";
1627 }
1628 }
1629 }
1630 }
1631}
1632
1633void MachineVerifier::verifyLiveIntervals() {
1634 assert(LiveInts && "Don't call verifyLiveIntervals without LiveInts");
1635 for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
1636 unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
1637
1638 // Spilling and splitting may leave unused registers around. Skip them.
1639 if (MRI->reg_nodbg_empty(Reg))
1640 continue;
1641
1642 if (!LiveInts->hasInterval(Reg)) {
1643 report("Missing live interval for virtual register", MF);
1644 errs() << PrintReg(Reg, TRI) << " still has defs or uses\n";
1645 continue;
1646 }
1647
1648 const LiveInterval &LI = LiveInts->getInterval(Reg);
1649 assert(Reg == LI.reg && "Invalid reg to interval mapping");
1650 verifyLiveInterval(LI);
1651 }
1652
1653 // Verify all the cached regunit intervals.
1654 for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
1655 if (const LiveRange *LR = LiveInts->getCachedRegUnit(i))
1656 verifyLiveRange(*LR, i);
1657}
1658
1659void MachineVerifier::verifyLiveRangeValue(const LiveRange &LR,
1660 const VNInfo *VNI, unsigned Reg,
1661 LaneBitmask LaneMask) {
1662 if (VNI->isUnused())
1663 return;
1664
1665 const VNInfo *DefVNI = LR.getVNInfoAt(VNI->def);
1666
1667 if (!DefVNI) {
1668 report("Value not live at VNInfo def and not marked unused", MF);
1669 report_context(LR, Reg, LaneMask);
1670 report_context(*VNI);
1671 return;
1672 }
1673
1674 if (DefVNI != VNI) {
1675 report("Live segment at def has different VNInfo", MF);
1676 report_context(LR, Reg, LaneMask);
1677 report_context(*VNI);
1678 return;
1679 }
1680
1681 const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(VNI->def);
1682 if (!MBB) {
1683 report("Invalid VNInfo definition index", MF);
1684 report_context(LR, Reg, LaneMask);
1685 report_context(*VNI);
1686 return;
1687 }
1688
1689 if (VNI->isPHIDef()) {
1690 if (VNI->def != LiveInts->getMBBStartIdx(MBB)) {
1691 report("PHIDef VNInfo is not defined at MBB start", MBB);
1692 report_context(LR, Reg, LaneMask);
1693 report_context(*VNI);
1694 }
1695 return;
1696 }
1697
1698 // Non-PHI def.
1699 const MachineInstr *MI = LiveInts->getInstructionFromIndex(VNI->def);
1700 if (!MI) {
1701 report("No instruction at VNInfo def index", MBB);
1702 report_context(LR, Reg, LaneMask);
1703 report_context(*VNI);
1704 return;
1705 }
1706
1707 if (Reg != 0) {
1708 bool hasDef = false;
1709 bool isEarlyClobber = false;
1710 for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
1711 if (!MOI->isReg() || !MOI->isDef())
1712 continue;
1713 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
1714 if (MOI->getReg() != Reg)
1715 continue;
1716 } else {
1717 if (!TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) ||
1718 !TRI->hasRegUnit(MOI->getReg(), Reg))
1719 continue;
1720 }
1721 if (LaneMask.any() &&
1722 (TRI->getSubRegIndexLaneMask(MOI->getSubReg()) & LaneMask).none())
1723 continue;
1724 hasDef = true;
1725 if (MOI->isEarlyClobber())
1726 isEarlyClobber = true;
1727 }
1728
1729 if (!hasDef) {
1730 report("Defining instruction does not modify register", MI);
1731 report_context(LR, Reg, LaneMask);
1732 report_context(*VNI);
1733 }
1734
1735 // Early clobber defs begin at USE slots, but other defs must begin at
1736 // DEF slots.
1737 if (isEarlyClobber) {
1738 if (!VNI->def.isEarlyClobber()) {
1739 report("Early clobber def must be at an early-clobber slot", MBB);
1740 report_context(LR, Reg, LaneMask);
1741 report_context(*VNI);
1742 }
1743 } else if (!VNI->def.isRegister()) {
1744 report("Non-PHI, non-early clobber def must be at a register slot", MBB);
1745 report_context(LR, Reg, LaneMask);
1746 report_context(*VNI);
1747 }
1748 }
1749}
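// ---- Editor's note (not part of the original MachineVerifier.cpp) ---------
// The slot checks above follow the SlotIndexes convention that each
// instruction index carries four ordered slots: Block (B), EarlyClobber (e),
// Register (r) and Dead (d). A PHI value starts at the block boundary, an
// early-clobber def must start at the e slot, and every other non-PHI def
// must start at the r slot -- exactly what the VNI->def.isEarlyClobber() and
// VNI->def.isRegister() tests encode. Illustrative queries on a def slot:
//
//   SlotIndex DefIdx = VNI->def;               // def slot of the value number
//   DefIdx.isEarlyClobber();                   // true only for the 'e' slot
//   DefIdx.isRegister();                       // true only for the 'r' slot
//   DefIdx.getDeadSlot();                      // same index, 'd' slot
// ---------------------------------------------------------------------------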
1750
1751void MachineVerifier::verifyLiveRangeSegment(const LiveRange &LR,
1752 const LiveRange::const_iterator I,
1753 unsigned Reg, LaneBitmask LaneMask)
1754{
1755 const LiveRange::Segment &S = *I;
1756 const VNInfo *VNI = S.valno;
1757 assert(VNI && "Live segment has no valno");
1758
1759 if (VNI->id >= LR.getNumValNums() || VNI != LR.getValNumInfo(VNI->id)) {
1. Assuming the condition is false
2. Taking false branch
1760 report("Foreign valno in live segment", MF);
1761 report_context(LR, Reg, LaneMask);
1762 report_context(S);
1763 report_context(*VNI);
1764 }
1765
1766 if (VNI->isUnused()) {
3. Taking false branch
1767 report("Live segment valno is marked unused", MF);
1768 report_context(LR, Reg, LaneMask);
1769 report_context(S);
1770 }
1771
1772 const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(S.start);
1773 if (!MBB) {
4. Taking false branch
1774 report("Bad start of live segment, no basic block", MF);
1775 report_context(LR, Reg, LaneMask);
1776 report_context(S);
1777 return;
1778 }
1779 SlotIndex MBBStartIdx = LiveInts->getMBBStartIdx(MBB);
1780 if (S.start != MBBStartIdx && S.start != VNI->def) {
1781 report("Live segment must begin at MBB entry or valno def", MBB);
1782 report_context(LR, Reg, LaneMask);
1783 report_context(S);
1784 }
1785
1786 const MachineBasicBlock *EndMBB =
1787 LiveInts->getMBBFromIndex(S.end.getPrevSlot());
1788 if (!EndMBB) {
5. Taking false branch
1789 report("Bad end of live segment, no basic block", MF);
1790 report_context(LR, Reg, LaneMask);
1791 report_context(S);
1792 return;
1793 }
1794
1795 // No more checks for live-out segments.
1796 if (S.end == LiveInts->getMBBEndIdx(EndMBB))
6. Taking false branch
1797 return;
1798
1799 // RegUnit intervals are allowed to have dead PHI defs.
1800 if (!TargetRegisterInfo::isVirtualRegister(Reg) && VNI->isPHIDef() &&
7. Taking false branch
1801 S.start == VNI->def && S.end == VNI->def.getDeadSlot())
1802 return;
1803
1804 // The live segment is ending inside EndMBB
1805 const MachineInstr *MI =
1806 LiveInts->getInstructionFromIndex(S.end.getPrevSlot());
1807 if (!MI) {
8. Assuming 'MI' is non-null
9. Taking false branch
1808 report("Live segment doesn't end at a valid instruction", EndMBB);
1809 report_context(LR, Reg, LaneMask);
1810 report_context(S);
1811 return;
1812 }
1813
1814 // The block slot must refer to a basic block boundary.
1815 if (S.end.isBlock()) {
10. Taking false branch
1816 report("Live segment ends at B slot of an instruction", EndMBB);
1817 report_context(LR, Reg, LaneMask);
1818 report_context(S);
1819 }
1820
1821 if (S.end.isDead()) {
11. Taking false branch
1822 // Segment ends on the dead slot.
1823 // That means there must be a dead def.
1824 if (!SlotIndex::isSameInstr(S.start, S.end)) {
1825 report("Live segment ending at dead slot spans instructions", EndMBB);
1826 report_context(LR, Reg, LaneMask);
1827 report_context(S);
1828 }
1829 }
1830
1831 // A live segment can only end at an early-clobber slot if it is being
1832 // redefined by an early-clobber def.
1833 if (S.end.isEarlyClobber()) {
12. Taking false branch
1834 if (I+1 == LR.end() || (I+1)->start != S.end) {
1835 report("Live segment ending at early clobber slot must be "
1836 "redefined by an EC def in the same instruction", EndMBB);
1837 report_context(LR, Reg, LaneMask);
1838 report_context(S);
1839 }
1840 }
1841
1842 // The following checks only apply to virtual registers. Physreg liveness
1843 // is too weird to check.
1844 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
13. Taking false branch
1845 // A live segment can end with either a redefinition, a kill flag on a
1846 // use, or a dead flag on a def.
1847 bool hasRead = false;
1848 bool hasSubRegDef = false;
1849 bool hasDeadDef = false;
1850 for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
1851 if (!MOI->isReg() || MOI->getReg() != Reg)
1852 continue;
1853 unsigned Sub = MOI->getSubReg();
1854 LaneBitmask SLM = Sub != 0 ? TRI->getSubRegIndexLaneMask(Sub)
1855 : LaneBitmask::getAll();
1856 if (MOI->isDef()) {
1857 if (Sub != 0) {
1858 hasSubRegDef = true;
1859 // An operand vreg0:sub0<def> reads vreg0:sub1..n. Invert the lane
1860 // mask for subregister defs. Read-undef defs will be handled by
1861 // readsReg below.
1862 SLM = ~SLM;
1863 }
1864 if (MOI->isDead())
1865 hasDeadDef = true;
1866 }
1867 if (LaneMask.any() && (LaneMask & SLM).none())
1868 continue;
1869 if (MOI->readsReg())
1870 hasRead = true;
1871 }
1872 if (S.end.isDead()) {
1873 // Make sure that the corresponding machine operand for a "dead" live
1874 // range has the dead flag. We cannot perform this check for subregister
1875 // liveranges as partially dead values are allowed.
1876 if (LaneMask.none() && !hasDeadDef) {
1877 report("Instruction ending live segment on dead slot has no dead flag",
1878 MI);
1879 report_context(LR, Reg, LaneMask);
1880 report_context(S);
1881 }
1882 } else {
1883 if (!hasRead) {
1884 // When tracking subregister liveness, the main range must start new
1885 // values on partial register writes, even if there is no read.
1886 if (!MRI->shouldTrackSubRegLiveness(Reg) || LaneMask.any() ||
1887 !hasSubRegDef) {
1888 report("Instruction ending live segment doesn't read the register",
1889 MI);
1890 report_context(LR, Reg, LaneMask);
1891 report_context(S);
1892 }
1893 }
1894 }
1895 }
1896
1897 // Now check all the basic blocks in this live segment.
1898 MachineFunction::const_iterator MFI = MBB->getIterator();
1899 // Is this live segment the beginning of a non-PHIDef VN?
1900 if (S.start == VNI->def && !VNI->isPHIDef()) {
1901 // Not live-in to any blocks.
1902 if (MBB == EndMBB)
1903 return;
1904 // Skip this block.
1905 ++MFI;
1906 }
1907 for (;;) {
14. Loop condition is true. Entering loop body
20. Loop condition is true. Entering loop body
26. Loop condition is true. Entering loop body
32. Loop condition is true. Entering loop body
1908 assert(LiveInts->isLiveInToMBB(LR, &*MFI));
1909 // We don't know how to track physregs into a landing pad.
1910 if (!TargetRegisterInfo::isVirtualRegister(Reg) &&
16. Taking false branch
22. Taking false branch
28. Taking false branch
34. Taking false branch
1911 MFI->isEHPad()) {
15. Assuming the condition is false
21. Assuming the condition is false
27. Assuming the condition is false
33. Assuming the condition is false
1912 if (&*MFI == EndMBB)
1913 break;
1914 ++MFI;
1915 continue;
1916 }
1917
1918 // Is VNI a PHI-def in the current block?
1919 bool IsPHI = VNI->isPHIDef() &&
1920 VNI->def == LiveInts->getMBBStartIdx(&*MFI);
1921
1922 // Check that VNI is live-out of all predecessors.
1923 for (MachineBasicBlock::const_pred_iterator PI = MFI->pred_begin(),
17. Loop condition is false. Execution continues on line 1950
23. Loop condition is false. Execution continues on line 1950
29. Loop condition is false. Execution continues on line 1950
35. Loop condition is true. Entering loop body
1924 PE = MFI->pred_end(); PI != PE; ++PI) {
1925 SlotIndex PEnd = LiveInts->getMBBEndIdx(*PI);
1926 const VNInfo *PVNI = LR.getVNInfoBefore(PEnd);
36. 'PVNI' initialized here
1927
1928 // All predecessors must have a live-out value if this is not a
1929 // subregister liverange.
1930 if (!PVNI && LaneMask.none()) {
37. Assuming 'PVNI' is null
38. Taking false branch
1931 report("Register not marked live out of predecessor", *PI);
1932 report_context(LR, Reg, LaneMask);
1933 report_context(*VNI);
1934 errs() << " live into BB#" << MFI->getNumber()
1935 << '@' << LiveInts->getMBBStartIdx(&*MFI) << ", not live before "
1936 << PEnd << '\n';
1937 continue;
1938 }
1939
1940 // Only PHI-defs can take different predecessor values.
1941 if (!IsPHI && PVNI != VNI) {
39. Taking true branch
1942 report("Different value live out of predecessor", *PI);
1943 report_context(LR, Reg, LaneMask);
1944 errs() << "Valno #" << PVNI->id << " live out of BB#"
40. Access to field 'id' results in a dereference of a null pointer (loaded from variable 'PVNI')
1945 << (*PI)->getNumber() << '@' << PEnd << "\nValno #" << VNI->id
1946 << " live into BB#" << MFI->getNumber() << '@'
1947 << LiveInts->getMBBStartIdx(&*MFI) << '\n';
1948 }
1949 }
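// ---- Editor's note (not part of the original MachineVerifier.cpp) ---------
// This loop is where the reported defect lives: when PVNI is null but
// LaneMask is non-zero (a subregister live range), the "not marked live out"
// branch above is skipped, PVNI != VNI still holds, and the errs() print at
// line 1944 dereferences the null PVNI. One possible guard, offered only as a
// hedged sketch and not as the upstream fix, is to require PVNI before
// comparing and printing:
//
//   if (!IsPHI && PVNI && PVNI != VNI) {
//     report("Different value live out of predecessor", *PI);
//     ...                       // print both value numbers as before
//   }
// ---------------------------------------------------------------------------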
1950 if (&*MFI == EndMBB)
18. Assuming the condition is false
19. Taking false branch
24. Assuming the condition is false
25. Taking false branch
30. Assuming the condition is false
31. Taking false branch
1951 break;
1952 ++MFI;
1953 }
1954}
1955
1956void MachineVerifier::verifyLiveRange(const LiveRange &LR, unsigned Reg,
1957 LaneBitmask LaneMask) {
1958 for (const VNInfo *VNI : LR.valnos)
1959 verifyLiveRangeValue(LR, VNI, Reg, LaneMask);
1960
1961 for (LiveRange::const_iterator I = LR.begin(), E = LR.end(); I != E; ++I)
1962 verifyLiveRangeSegment(LR, I, Reg, LaneMask);
1963}
1964
1965void MachineVerifier::verifyLiveInterval(const LiveInterval &LI) {
1966 unsigned Reg = LI.reg;
1967 assert(TargetRegisterInfo::isVirtualRegister(Reg));
1968 verifyLiveRange(LI, Reg);
1969
1970 LaneBitmask Mask;
1971 LaneBitmask MaxMask = MRI->getMaxLaneMaskForVReg(Reg);
1972 for (const LiveInterval::SubRange &SR : LI.subranges()) {
1973 if ((Mask & SR.LaneMask).any()) {
1974 report("Lane masks of sub ranges overlap in live interval", MF);
1975 report_context(LI);
1976 }
1977 if ((SR.LaneMask & ~MaxMask).any()) {
1978 report("Subrange lanemask is invalid", MF);
1979 report_context(LI);
1980 }
1981 if (SR.empty()) {
1982 report("Subrange must not be empty", MF);
1983 report_context(SR, LI.reg, SR.LaneMask);
1984 }
1985 Mask |= SR.LaneMask;
1986 verifyLiveRange(SR, LI.reg, SR.LaneMask);
1987 if (!LI.covers(SR)) {
1988 report("A Subrange is not covered by the main range", MF);
1989 report_context(LI);
1990 }
1991 }
1992
1993 // Check the LI only has one connected component.
1994 ConnectedVNInfoEqClasses ConEQ(*LiveInts);
1995 unsigned NumComp = ConEQ.Classify(LI);
1996 if (NumComp > 1) {
1997 report("Multiple connected components in live interval", MF);
1998 report_context(LI);
1999 for (unsigned comp = 0; comp != NumComp; ++comp) {
2000 errs() << comp << ": valnos";
2001 for (LiveInterval::const_vni_iterator I = LI.vni_begin(),
2002 E = LI.vni_end(); I!=E; ++I)
2003 if (comp == ConEQ.getEqClass(*I))
2004 errs() << ' ' << (*I)->id;
2005 errs() << '\n';
2006 }
2007 }
2008}
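// ---- Editor's note (not part of the original MachineVerifier.cpp) ---------
// A worked example of what verifyLiveInterval expects from subranges, with
// made-up lane masks and subregister names purely for illustration. A vreg
// tracked with subregister liveness might carry:
//
//   main range            segments covering [A,B)
//   subrange for sub_lo   LaneMask 0x1, segments inside [A,B)
//   subrange for sub_hi   LaneMask 0x2, segments inside [A,B)
//
// The checks above require the subrange masks to be pairwise disjoint, each
// mask to stay within getMaxLaneMaskForVReg(Reg), every subrange to be
// non-empty, every subrange segment to be covered by the main range, and the
// whole interval to form a single connected component of value numbers.
// ---------------------------------------------------------------------------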
2009
2010namespace {
2011 // FrameSetup and FrameDestroy can have zero adjustment, so using a single
2012 // integer, we can't tell whether it is a FrameSetup or FrameDestroy if the
2013 // value is zero.
2014 // We use a bool plus an integer to capture the stack state.
2015 struct StackStateOfBB {
2016 StackStateOfBB() : EntryValue(0), ExitValue(0), EntryIsSetup(false),
2017 ExitIsSetup(false) { }
2018 StackStateOfBB(int EntryVal, int ExitVal, bool EntrySetup, bool ExitSetup) :
2019 EntryValue(EntryVal), ExitValue(ExitVal), EntryIsSetup(EntrySetup),
2020 ExitIsSetup(ExitSetup) { }
2021 // Can be negative, which means we are setting up a frame.
2022 int EntryValue;
2023 int ExitValue;
2024 bool EntryIsSetup;
2025 bool ExitIsSetup;
2026 };
2027}
2028
2029/// Make sure that on every path through the CFG, a FrameSetup <n> is always
2030/// followed by a FrameDestroy <n>, that stack adjustments are identical on all
2031/// CFG edges to a merge point, and that the frame is destroyed at the end of a return block.
2032void MachineVerifier::verifyStackFrame() {
2033 unsigned FrameSetupOpcode = TII->getCallFrameSetupOpcode();
2034 unsigned FrameDestroyOpcode = TII->getCallFrameDestroyOpcode();
2035 if (FrameSetupOpcode == ~0u && FrameDestroyOpcode == ~0u)
2036 return;
2037
2038 SmallVector<StackStateOfBB, 8> SPState;
2039 SPState.resize(MF->getNumBlockIDs());
2040 df_iterator_default_set<const MachineBasicBlock*> Reachable;
2041
2042 // Visit the MBBs in DFS order.
2043 for (df_ext_iterator<const MachineFunction*,
2044 df_iterator_default_set<const MachineBasicBlock*> >
2045 DFI = df_ext_begin(MF, Reachable), DFE = df_ext_end(MF, Reachable);
2046 DFI != DFE; ++DFI) {
2047 const MachineBasicBlock *MBB = *DFI;
2048
2049 StackStateOfBB BBState;
2050 // Check the exit state of the DFS stack predecessor.
2051 if (DFI.getPathLength() >= 2) {
2052 const MachineBasicBlock *StackPred = DFI.getPath(DFI.getPathLength() - 2);
2053 assert(Reachable.count(StackPred) &&
2054        "DFS stack predecessor is already visited.\n");
2055 BBState.EntryValue = SPState[StackPred->getNumber()].ExitValue;
2056 BBState.EntryIsSetup = SPState[StackPred->getNumber()].ExitIsSetup;
2057 BBState.ExitValue = BBState.EntryValue;
2058 BBState.ExitIsSetup = BBState.EntryIsSetup;
2059 }
2060
2061 // Update stack state by checking contents of MBB.
2062 for (const auto &I : *MBB) {
2063 if (I.getOpcode() == FrameSetupOpcode) {
2064 if (BBState.ExitIsSetup)
2065 report("FrameSetup is after another FrameSetup", &I);
2066 BBState.ExitValue -= TII->getFrameTotalSize(I);
2067 BBState.ExitIsSetup = true;
2068 }
2069
2070 if (I.getOpcode() == FrameDestroyOpcode) {
2071 int Size = TII->getFrameTotalSize(I);
2072 if (!BBState.ExitIsSetup)
2073 report("FrameDestroy is not after a FrameSetup", &I);
2074 int AbsSPAdj = BBState.ExitValue < 0 ? -BBState.ExitValue :
2075 BBState.ExitValue;
2076 if (BBState.ExitIsSetup && AbsSPAdj != Size) {
2077 report("FrameDestroy <n> is after FrameSetup <m>", &I);
2078 errs() << "FrameDestroy <" << Size << "> is after FrameSetup <"
2079 << AbsSPAdj << ">.\n";
2080 }
2081 BBState.ExitValue += Size;
2082 BBState.ExitIsSetup = false;
2083 }
2084 }
2085 SPState[MBB->getNumber()] = BBState;
2086
2087 // Make sure the exit state of any predecessor is consistent with the entry
2088 // state.
2089 for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
2090 E = MBB->pred_end(); I != E; ++I) {
2091 if (Reachable.count(*I) &&
2092 (SPState[(*I)->getNumber()].ExitValue != BBState.EntryValue ||
2093 SPState[(*I)->getNumber()].ExitIsSetup != BBState.EntryIsSetup)) {
2094 report("The exit stack state of a predecessor is inconsistent.", MBB);
2095 errs() << "Predecessor BB#" << (*I)->getNumber() << " has exit state ("
2096 << SPState[(*I)->getNumber()].ExitValue << ", "
2097 << SPState[(*I)->getNumber()].ExitIsSetup
2098 << "), while BB#" << MBB->getNumber() << " has entry state ("
2099 << BBState.EntryValue << ", " << BBState.EntryIsSetup << ").\n";
2100 }
2101 }
2102
2103 // Make sure the entry state of any successor is consistent with the exit
2104 // state.
2105 for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
2106 E = MBB->succ_end(); I != E; ++I) {
2107 if (Reachable.count(*I) &&
2108 (SPState[(*I)->getNumber()].EntryValue != BBState.ExitValue ||
2109 SPState[(*I)->getNumber()].EntryIsSetup != BBState.ExitIsSetup)) {
2110 report("The entry stack state of a successor is inconsistent.", MBB);
2111 errs() << "Successor BB#" << (*I)->getNumber() << " has entry state ("
2112 << SPState[(*I)->getNumber()].EntryValue << ", "
2113 << SPState[(*I)->getNumber()].EntryIsSetup
2114 << "), while BB#" << MBB->getNumber() << " has exit state ("
2115 << BBState.ExitValue << ", " << BBState.ExitIsSetup << ").\n";
2116 }
2117 }
2118
2119 // Make sure a basic block with return ends with zero stack adjustment.
2120 if (!MBB->empty() && MBB->back().isReturn()) {
2121 if (BBState.ExitIsSetup)
2122 report("A return block ends with a FrameSetup.", MBB);
2123 if (BBState.ExitValue)
2124 report("A return block ends with a nonzero stack adjustment.", MBB);
2125 }
2126 }
2127}
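// ---- Editor's note (not part of the original MachineVerifier.cpp) ---------
// A sketch of the invariant verifyStackFrame enforces, using call-frame
// pseudo-instructions in the style of X86 as an assumed example target (the
// opcode spellings and operand lists are illustrative, not taken from this
// file):
//
//   BB#0:                            entry state (0, false)
//     ADJCALLSTACKDOWN 16            ; FrameSetup    -> exit (-16, true)
//     CALL @callee
//     ADJCALLSTACKUP 16              ; FrameDestroy  -> exit (0, false)
//     RET                            ; return block must end at (0, false)
//
// A FrameDestroy whose size differs from the pending FrameSetup, a
// FrameDestroy with no pending FrameSetup, or predecessor/successor blocks
// whose exit/entry states disagree are all reported as errors.
// ---------------------------------------------------------------------------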