
/3rd_party/llvm/lib/CodeGen/MachineVerifier.cpp

  1. //===-- MachineVerifier.cpp - Machine Code Verifier -----------------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // Pass to verify generated machine code. The following is checked:
  11. //
  12. // Operand counts: All explicit operands must be present.
  13. //
  14. // Register classes: All physical and virtual register operands must be
  15. // compatible with the register class required by the instruction descriptor.
  16. //
  17. // Register live intervals: Registers must be defined only once, and must be
  18. // defined before use.
  19. //
  20. // The machine code verifier is enabled from LLVMTargetMachine.cpp with the
  21. // command-line option -verify-machineinstrs, or by defining the environment
  22. // variable LLVM_VERIFY_MACHINEINSTRS to the name of a file that will receive
  23. // the verifier errors.
  24. //===----------------------------------------------------------------------===//
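// Usage sketch: the checks implemented below can typically be requested either
// on the command line, e.g.
//
//   llc -verify-machineinstrs foo.ll
//
// or programmatically, by adding the pass created by the factory defined in
// this file to a (legacy) pass manager:
//
//   PM.add(createMachineVerifierPass("After my pass"));  // banner is optional
//
// (PM, foo.ll and the banner string are placeholders.) Setting
// LLVM_VERIFY_MACHINEINSTRS=<file> makes runOnMachineFunction() below append
// its diagnostics to <file> instead of calling report_fatal_error().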
  25. #include "llvm/CodeGen/Passes.h"
  26. #include "llvm/ADT/DenseSet.h"
  27. #include "llvm/ADT/DepthFirstIterator.h"
  28. #include "llvm/ADT/SetOperations.h"
  29. #include "llvm/ADT/SmallVector.h"
  30. #include "llvm/CodeGen/LiveIntervalAnalysis.h"
  31. #include "llvm/CodeGen/LiveStackAnalysis.h"
  32. #include "llvm/CodeGen/LiveVariables.h"
  33. #include "llvm/CodeGen/MachineFrameInfo.h"
  34. #include "llvm/CodeGen/MachineFunctionPass.h"
  35. #include "llvm/CodeGen/MachineInstrBundle.h"
  36. #include "llvm/CodeGen/MachineMemOperand.h"
  37. #include "llvm/CodeGen/MachineRegisterInfo.h"
  38. #include "llvm/IR/BasicBlock.h"
  39. #include "llvm/IR/InlineAsm.h"
  40. #include "llvm/IR/Instructions.h"
  41. #include "llvm/MC/MCAsmInfo.h"
  42. #include "llvm/Support/Debug.h"
  43. #include "llvm/Support/ErrorHandling.h"
  44. #include "llvm/Support/raw_ostream.h"
  45. #include "llvm/Target/TargetInstrInfo.h"
  46. #include "llvm/Target/TargetMachine.h"
  47. #include "llvm/Target/TargetRegisterInfo.h"
  48. using namespace llvm;
  49. namespace {
  50. struct MachineVerifier {
  51. MachineVerifier(Pass *pass, const char *b) :
  52. PASS(pass),
  53. Banner(b),
  54. OutFileName(getenv("LLVM_VERIFY_MACHINEINSTRS"))
  55. {}
  56. bool runOnMachineFunction(MachineFunction &MF);
  57. Pass *const PASS;
  58. const char *Banner;
  59. const char *const OutFileName;
  60. raw_ostream *OS;
  61. const MachineFunction *MF;
  62. const TargetMachine *TM;
  63. const TargetInstrInfo *TII;
  64. const TargetRegisterInfo *TRI;
  65. const MachineRegisterInfo *MRI;
  66. unsigned foundErrors;
  67. typedef SmallVector<unsigned, 16> RegVector;
  68. typedef SmallVector<const uint32_t*, 4> RegMaskVector;
  69. typedef DenseSet<unsigned> RegSet;
  70. typedef DenseMap<unsigned, const MachineInstr*> RegMap;
  71. typedef SmallPtrSet<const MachineBasicBlock*, 8> BlockSet;
  72. const MachineInstr *FirstTerminator;
  73. BlockSet FunctionBlocks;
  74. BitVector regsReserved;
  75. RegSet regsLive;
  76. RegVector regsDefined, regsDead, regsKilled;
  77. RegMaskVector regMasks;
  78. RegSet regsLiveInButUnused;
  79. SlotIndex lastIndex;
  80. // Add Reg and any sub-registers to RV
  81. void addRegWithSubRegs(RegVector &RV, unsigned Reg) {
  82. RV.push_back(Reg);
  83. if (TargetRegisterInfo::isPhysicalRegister(Reg))
  84. for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
  85. RV.push_back(*SubRegs);
  86. }
  87. struct BBInfo {
  88. // Is this MBB reachable from the MF entry point?
  89. bool reachable;
  90. // Vregs that must be live in because they are used without being
  91. // defined. Map value is the user.
  92. RegMap vregsLiveIn;
  93. // Regs killed in MBB. They may be defined again, and will then be in both
  94. // regsKilled and regsLiveOut.
  95. RegSet regsKilled;
  96. // Regs defined in MBB and live out. Note that vregs passing through may
  97. // be live out without being mentioned here.
  98. RegSet regsLiveOut;
  99. // Vregs that pass through MBB untouched. This set is disjoint from
  100. // regsKilled and regsLiveOut.
  101. RegSet vregsPassed;
  102. // Vregs that must pass through MBB because they are needed by a successor
  103. // block. This set is disjoint from regsLiveOut.
  104. RegSet vregsRequired;
  105. // Set versions of block's predecessor and successor lists.
  106. BlockSet Preds, Succs;
  107. BBInfo() : reachable(false) {}
  108. // Add register to vregsPassed if it belongs there. Return true if
  109. // anything changed.
  110. bool addPassed(unsigned Reg) {
  111. if (!TargetRegisterInfo::isVirtualRegister(Reg))
  112. return false;
  113. if (regsKilled.count(Reg) || regsLiveOut.count(Reg))
  114. return false;
  115. return vregsPassed.insert(Reg).second;
  116. }
  117. // Same for a full set.
  118. bool addPassed(const RegSet &RS) {
  119. bool changed = false;
  120. for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
  121. if (addPassed(*I))
  122. changed = true;
  123. return changed;
  124. }
  125. // Add register to vregsRequired if it belongs there. Return true if
  126. // anything changed.
  127. bool addRequired(unsigned Reg) {
  128. if (!TargetRegisterInfo::isVirtualRegister(Reg))
  129. return false;
  130. if (regsLiveOut.count(Reg))
  131. return false;
  132. return vregsRequired.insert(Reg).second;
  133. }
  134. // Same for a full set.
  135. bool addRequired(const RegSet &RS) {
  136. bool changed = false;
  137. for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
  138. if (addRequired(*I))
  139. changed = true;
  140. return changed;
  141. }
  142. // Same for a full map.
  143. bool addRequired(const RegMap &RM) {
  144. bool changed = false;
  145. for (RegMap::const_iterator I = RM.begin(), E = RM.end(); I != E; ++I)
  146. if (addRequired(I->first))
  147. changed = true;
  148. return changed;
  149. }
  150. // Live-out registers are either in regsLiveOut or vregsPassed.
  151. bool isLiveOut(unsigned Reg) const {
  152. return regsLiveOut.count(Reg) || vregsPassed.count(Reg);
  153. }
  154. };
  155. // Extra register info per MBB.
  156. DenseMap<const MachineBasicBlock*, BBInfo> MBBInfoMap;
  157. bool isReserved(unsigned Reg) {
  158. return Reg < regsReserved.size() && regsReserved.test(Reg);
  159. }
  160. bool isAllocatable(unsigned Reg) {
  161. return Reg < TRI->getNumRegs() && MRI->isAllocatable(Reg);
  162. }
  163. // Analysis information if available
  164. LiveVariables *LiveVars;
  165. LiveIntervals *LiveInts;
  166. LiveStacks *LiveStks;
  167. SlotIndexes *Indexes;
  168. void visitMachineFunctionBefore();
  169. void visitMachineBasicBlockBefore(const MachineBasicBlock *MBB);
  170. void visitMachineBundleBefore(const MachineInstr *MI);
  171. void visitMachineInstrBefore(const MachineInstr *MI);
  172. void visitMachineOperand(const MachineOperand *MO, unsigned MONum);
  173. void visitMachineInstrAfter(const MachineInstr *MI);
  174. void visitMachineBundleAfter(const MachineInstr *MI);
  175. void visitMachineBasicBlockAfter(const MachineBasicBlock *MBB);
  176. void visitMachineFunctionAfter();
  177. void report(const char *msg, const MachineFunction *MF);
  178. void report(const char *msg, const MachineBasicBlock *MBB);
  179. void report(const char *msg, const MachineInstr *MI);
  180. void report(const char *msg, const MachineOperand *MO, unsigned MONum);
  181. void report(const char *msg, const MachineFunction *MF,
  182. const LiveInterval &LI);
  183. void report(const char *msg, const MachineBasicBlock *MBB,
  184. const LiveInterval &LI);
  185. void report(const char *msg, const MachineFunction *MF,
  186. const LiveRange &LR);
  187. void report(const char *msg, const MachineBasicBlock *MBB,
  188. const LiveRange &LR);
  189. void verifyInlineAsm(const MachineInstr *MI);
  190. void checkLiveness(const MachineOperand *MO, unsigned MONum);
  191. void markReachable(const MachineBasicBlock *MBB);
  192. void calcRegsPassed();
  193. void checkPHIOps(const MachineBasicBlock *MBB);
  194. void calcRegsRequired();
  195. void verifyLiveVariables();
  196. void verifyLiveIntervals();
  197. void verifyLiveInterval(const LiveInterval&);
  198. void verifyLiveRangeValue(const LiveRange&, const VNInfo*, unsigned);
  199. void verifyLiveRangeSegment(const LiveRange&,
  200. const LiveRange::const_iterator I, unsigned);
  201. void verifyLiveRange(const LiveRange&, unsigned);
  202. void verifyStackFrame();
  203. };
  204. struct MachineVerifierPass : public MachineFunctionPass {
  205. static char ID; // Pass ID, replacement for typeid
  206. const char *const Banner;
  207. MachineVerifierPass(const char *b = 0)
  208. : MachineFunctionPass(ID), Banner(b) {
  209. initializeMachineVerifierPassPass(*PassRegistry::getPassRegistry());
  210. }
  211. void getAnalysisUsage(AnalysisUsage &AU) const {
  212. AU.setPreservesAll();
  213. MachineFunctionPass::getAnalysisUsage(AU);
  214. }
  215. bool runOnMachineFunction(MachineFunction &MF) {
  216. MF.verify(this, Banner);
  217. return false;
  218. }
  219. };
  220. }
  221. char MachineVerifierPass::ID = 0;
  222. INITIALIZE_PASS(MachineVerifierPass, "machineverifier",
  223. "Verify generated machine code", false, false)
  224. FunctionPass *llvm::createMachineVerifierPass(const char *Banner) {
  225. return new MachineVerifierPass(Banner);
  226. }
  227. void MachineFunction::verify(Pass *p, const char *Banner) const {
  228. MachineVerifier(p, Banner)
  229. .runOnMachineFunction(const_cast<MachineFunction&>(*this));
  230. }
  231. bool MachineVerifier::runOnMachineFunction(MachineFunction &MF) {
  232. raw_ostream *OutFile = 0;
  233. if (OutFileName) {
  234. std::string ErrorInfo;
  235. OutFile = new raw_fd_ostream(OutFileName, ErrorInfo, sys::fs::F_Append);
  236. if (!ErrorInfo.empty()) {
  237. errs() << "Error opening '" << OutFileName << "': " << ErrorInfo << '\n';
  238. exit(1);
  239. }
  240. OS = OutFile;
  241. } else {
  242. OS = &errs();
  243. }
  244. foundErrors = 0;
  245. this->MF = &MF;
  246. TM = &MF.getTarget();
  247. TII = TM->getInstrInfo();
  248. TRI = TM->getRegisterInfo();
  249. MRI = &MF.getRegInfo();
  250. LiveVars = NULL;
  251. LiveInts = NULL;
  252. LiveStks = NULL;
  253. Indexes = NULL;
  254. if (PASS) {
  255. LiveInts = PASS->getAnalysisIfAvailable<LiveIntervals>();
  256. // We don't want to verify LiveVariables if LiveIntervals is available.
  257. if (!LiveInts)
  258. LiveVars = PASS->getAnalysisIfAvailable<LiveVariables>();
  259. LiveStks = PASS->getAnalysisIfAvailable<LiveStacks>();
  260. Indexes = PASS->getAnalysisIfAvailable<SlotIndexes>();
  261. }
  262. visitMachineFunctionBefore();
  263. for (MachineFunction::const_iterator MFI = MF.begin(), MFE = MF.end();
  264. MFI!=MFE; ++MFI) {
  265. visitMachineBasicBlockBefore(MFI);
  266. // Keep track of the current bundle header.
  267. const MachineInstr *CurBundle = 0;
  268. // Do we expect the next instruction to be part of the same bundle?
  269. bool InBundle = false;
  270. for (MachineBasicBlock::const_instr_iterator MBBI = MFI->instr_begin(),
  271. MBBE = MFI->instr_end(); MBBI != MBBE; ++MBBI) {
  272. if (MBBI->getParent() != MFI) {
  273. report("Bad instruction parent pointer", MFI);
  274. *OS << "Instruction: " << *MBBI;
  275. continue;
  276. }
  277. // Check for consistent bundle flags.
  278. if (InBundle && !MBBI->isBundledWithPred())
  279. report("Missing BundledPred flag, "
  280. "BundledSucc was set on predecessor", MBBI);
  281. if (!InBundle && MBBI->isBundledWithPred())
  282. report("BundledPred flag is set, "
  283. "but BundledSucc not set on predecessor", MBBI);
  284. // Is this a bundle header?
  285. if (!MBBI->isInsideBundle()) {
  286. if (CurBundle)
  287. visitMachineBundleAfter(CurBundle);
  288. CurBundle = MBBI;
  289. visitMachineBundleBefore(CurBundle);
  290. } else if (!CurBundle)
  291. report("No bundle header", MBBI);
  292. visitMachineInstrBefore(MBBI);
  293. for (unsigned I = 0, E = MBBI->getNumOperands(); I != E; ++I)
  294. visitMachineOperand(&MBBI->getOperand(I), I);
  295. visitMachineInstrAfter(MBBI);
  296. // Was this the last bundled instruction?
  297. InBundle = MBBI->isBundledWithSucc();
  298. }
  299. if (CurBundle)
  300. visitMachineBundleAfter(CurBundle);
  301. if (InBundle)
  302. report("BundledSucc flag set on last instruction in block", &MFI->back());
  303. visitMachineBasicBlockAfter(MFI);
  304. }
  305. visitMachineFunctionAfter();
  306. if (OutFile)
  307. delete OutFile;
  308. else if (foundErrors)
  309. report_fatal_error("Found "+Twine(foundErrors)+" machine code errors.");
  310. // Clean up.
  311. regsLive.clear();
  312. regsDefined.clear();
  313. regsDead.clear();
  314. regsKilled.clear();
  315. regMasks.clear();
  316. regsLiveInButUnused.clear();
  317. MBBInfoMap.clear();
  318. return false; // no changes
  319. }
  320. void MachineVerifier::report(const char *msg, const MachineFunction *MF) {
  321. assert(MF);
  322. *OS << '\n';
  323. if (!foundErrors++) {
  324. if (Banner)
  325. *OS << "# " << Banner << '\n';
  326. MF->print(*OS, Indexes);
  327. }
  328. *OS << "*** Bad machine code: " << msg << " ***\n"
  329. << "- function: " << MF->getName() << "\n";
  330. }
  331. void MachineVerifier::report(const char *msg, const MachineBasicBlock *MBB) {
  332. assert(MBB);
  333. report(msg, MBB->getParent());
  334. *OS << "- basic block: BB#" << MBB->getNumber()
  335. << ' ' << MBB->getName()
  336. << " (" << (const void*)MBB << ')';
  337. if (Indexes)
  338. *OS << " [" << Indexes->getMBBStartIdx(MBB)
  339. << ';' << Indexes->getMBBEndIdx(MBB) << ')';
  340. *OS << '\n';
  341. }
  342. void MachineVerifier::report(const char *msg, const MachineInstr *MI) {
  343. assert(MI);
  344. report(msg, MI->getParent());
  345. *OS << "- instruction: ";
  346. if (Indexes && Indexes->hasIndex(MI))
  347. *OS << Indexes->getInstructionIndex(MI) << '\t';
  348. MI->print(*OS, TM);
  349. }
  350. void MachineVerifier::report(const char *msg,
  351. const MachineOperand *MO, unsigned MONum) {
  352. assert(MO);
  353. report(msg, MO->getParent());
  354. *OS << "- operand " << MONum << ": ";
  355. MO->print(*OS, TM);
  356. *OS << "\n";
  357. }
  358. void MachineVerifier::report(const char *msg, const MachineFunction *MF,
  359. const LiveInterval &LI) {
  360. report(msg, MF);
  361. *OS << "- interval: " << LI << '\n';
  362. }
  363. void MachineVerifier::report(const char *msg, const MachineBasicBlock *MBB,
  364. const LiveInterval &LI) {
  365. report(msg, MBB);
  366. *OS << "- interval: " << LI << '\n';
  367. }
  368. void MachineVerifier::report(const char *msg, const MachineBasicBlock *MBB,
  369. const LiveRange &LR) {
  370. report(msg, MBB);
  371. *OS << "- liverange: " << LR << "\n";
  372. }
  373. void MachineVerifier::report(const char *msg, const MachineFunction *MF,
  374. const LiveRange &LR) {
  375. report(msg, MF);
  376. *OS << "- liverange: " << LR << "\n";
  377. }
  378. void MachineVerifier::markReachable(const MachineBasicBlock *MBB) {
  379. BBInfo &MInfo = MBBInfoMap[MBB];
  380. if (!MInfo.reachable) {
  381. MInfo.reachable = true;
  382. for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
  383. SuE = MBB->succ_end(); SuI != SuE; ++SuI)
  384. markReachable(*SuI);
  385. }
  386. }
  387. void MachineVerifier::visitMachineFunctionBefore() {
  388. lastIndex = SlotIndex();
  389. regsReserved = MRI->getReservedRegs();
  390. // A sub-register of a reserved register is also reserved
  391. for (int Reg = regsReserved.find_first(); Reg>=0;
  392. Reg = regsReserved.find_next(Reg)) {
  393. for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
  394. // FIXME: This should probably be:
  395. // assert(regsReserved.test(*SubRegs) && "Non-reserved sub-register");
  396. regsReserved.set(*SubRegs);
  397. }
  398. }
  399. markReachable(&MF->front());
  400. // Build a set of the basic blocks in the function.
  401. FunctionBlocks.clear();
  402. for (MachineFunction::const_iterator
  403. I = MF->begin(), E = MF->end(); I != E; ++I) {
  404. FunctionBlocks.insert(I);
  405. BBInfo &MInfo = MBBInfoMap[I];
  406. MInfo.Preds.insert(I->pred_begin(), I->pred_end());
  407. if (MInfo.Preds.size() != I->pred_size())
  408. report("MBB has duplicate entries in its predecessor list.", I);
  409. MInfo.Succs.insert(I->succ_begin(), I->succ_end());
  410. if (MInfo.Succs.size() != I->succ_size())
  411. report("MBB has duplicate entries in its successor list.", I);
  412. }
  413. // Check that the register use lists are sane.
  414. MRI->verifyUseLists();
  415. verifyStackFrame();
  416. }
  417. // Does iterator point to a and b as the first two elements?
  418. static bool matchPair(MachineBasicBlock::const_succ_iterator i,
  419. const MachineBasicBlock *a, const MachineBasicBlock *b) {
  420. if (*i == a)
  421. return *++i == b;
  422. if (*i == b)
  423. return *++i == a;
  424. return false;
  425. }
  426. void
  427. MachineVerifier::visitMachineBasicBlockBefore(const MachineBasicBlock *MBB) {
  428. FirstTerminator = 0;
  429. if (MRI->isSSA()) {
  430. // If this block has allocatable physical registers live-in, check that
  431. // it is an entry block or landing pad.
  432. for (MachineBasicBlock::livein_iterator LI = MBB->livein_begin(),
  433. LE = MBB->livein_end();
  434. LI != LE; ++LI) {
  435. unsigned reg = *LI;
  436. if (isAllocatable(reg) && !MBB->isLandingPad() &&
  437. MBB != MBB->getParent()->begin()) {
  438. report("MBB has allocable live-in, but isn't entry or landing-pad.", MBB);
  439. }
  440. }
  441. }
  442. // Count the number of landing pad successors.
  443. SmallPtrSet<MachineBasicBlock*, 4> LandingPadSuccs;
  444. for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
  445. E = MBB->succ_end(); I != E; ++I) {
  446. if ((*I)->isLandingPad())
  447. LandingPadSuccs.insert(*I);
  448. if (!FunctionBlocks.count(*I))
  449. report("MBB has successor that isn't part of the function.", MBB);
  450. if (!MBBInfoMap[*I].Preds.count(MBB)) {
  451. report("Inconsistent CFG", MBB);
  452. *OS << "MBB is not in the predecessor list of the successor BB#"
  453. << (*I)->getNumber() << ".\n";
  454. }
  455. }
  456. // Check the predecessor list.
  457. for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
  458. E = MBB->pred_end(); I != E; ++I) {
  459. if (!FunctionBlocks.count(*I))
  460. report("MBB has predecessor that isn't part of the function.", MBB);
  461. if (!MBBInfoMap[*I].Succs.count(MBB)) {
  462. report("Inconsistent CFG", MBB);
  463. *OS << "MBB is not in the successor list of the predecessor BB#"
  464. << (*I)->getNumber() << ".\n";
  465. }
  466. }
  467. const MCAsmInfo *AsmInfo = TM->getMCAsmInfo();
  468. const BasicBlock *BB = MBB->getBasicBlock();
  469. if (LandingPadSuccs.size() > 1 &&
  470. !(AsmInfo &&
  471. AsmInfo->getExceptionHandlingType() == ExceptionHandling::SjLj &&
  472. BB && isa<SwitchInst>(BB->getTerminator())))
  473. report("MBB has more than one landing pad successor", MBB);
474. // Call AnalyzeBranch. If it succeeds, there are several more conditions to check.
  475. MachineBasicBlock *TBB = 0, *FBB = 0;
  476. SmallVector<MachineOperand, 4> Cond;
  477. if (!TII->AnalyzeBranch(*const_cast<MachineBasicBlock *>(MBB),
  478. TBB, FBB, Cond)) {
  479. // Ok, AnalyzeBranch thinks it knows what's going on with this block. Let's
  480. // check whether its answers match up with reality.
  481. if (!TBB && !FBB) {
  482. // Block falls through to its successor.
  483. MachineFunction::const_iterator MBBI = MBB;
  484. ++MBBI;
  485. if (MBBI == MF->end()) {
  486. // It's possible that the block legitimately ends with a noreturn
  487. // call or an unreachable, in which case it won't actually fall
  488. // out the bottom of the function.
  489. } else if (MBB->succ_size() == LandingPadSuccs.size()) {
  490. // It's possible that the block legitimately ends with a noreturn
491. // call or an unreachable, in which case it won't actually fall
  492. // out of the block.
  493. } else if (MBB->succ_size() != 1+LandingPadSuccs.size()) {
  494. report("MBB exits via unconditional fall-through but doesn't have "
  495. "exactly one CFG successor!", MBB);
  496. } else if (!MBB->isSuccessor(MBBI)) {
  497. report("MBB exits via unconditional fall-through but its successor "
  498. "differs from its CFG successor!", MBB);
  499. }
  500. if (!MBB->empty() && getBundleStart(&MBB->back())->isBarrier() &&
  501. !TII->isPredicated(getBundleStart(&MBB->back()))) {
  502. report("MBB exits via unconditional fall-through but ends with a "
  503. "barrier instruction!", MBB);
  504. }
  505. if (!Cond.empty()) {
  506. report("MBB exits via unconditional fall-through but has a condition!",
  507. MBB);
  508. }
  509. } else if (TBB && !FBB && Cond.empty()) {
  510. // Block unconditionally branches somewhere.
  511. if (MBB->succ_size() != 1+LandingPadSuccs.size()) {
  512. report("MBB exits via unconditional branch but doesn't have "
  513. "exactly one CFG successor!", MBB);
  514. } else if (!MBB->isSuccessor(TBB)) {
  515. report("MBB exits via unconditional branch but the CFG "
  516. "successor doesn't match the actual successor!", MBB);
  517. }
  518. if (MBB->empty()) {
  519. report("MBB exits via unconditional branch but doesn't contain "
  520. "any instructions!", MBB);
  521. } else if (!getBundleStart(&MBB->back())->isBarrier()) {
  522. report("MBB exits via unconditional branch but doesn't end with a "
  523. "barrier instruction!", MBB);
  524. } else if (!getBundleStart(&MBB->back())->isTerminator()) {
  525. report("MBB exits via unconditional branch but the branch isn't a "
  526. "terminator instruction!", MBB);
  527. }
  528. } else if (TBB && !FBB && !Cond.empty()) {
  529. // Block conditionally branches somewhere, otherwise falls through.
  530. MachineFunction::const_iterator MBBI = MBB;
  531. ++MBBI;
  532. if (MBBI == MF->end()) {
  533. report("MBB conditionally falls through out of function!", MBB);
  534. } else if (MBB->succ_size() == 1) {
  535. // A conditional branch with only one successor is weird, but allowed.
  536. if (&*MBBI != TBB)
  537. report("MBB exits via conditional branch/fall-through but only has "
  538. "one CFG successor!", MBB);
  539. else if (TBB != *MBB->succ_begin())
  540. report("MBB exits via conditional branch/fall-through but the CFG "
  541. "successor don't match the actual successor!", MBB);
  542. } else if (MBB->succ_size() != 2) {
  543. report("MBB exits via conditional branch/fall-through but doesn't have "
  544. "exactly two CFG successors!", MBB);
  545. } else if (!matchPair(MBB->succ_begin(), TBB, MBBI)) {
  546. report("MBB exits via conditional branch/fall-through but the CFG "
  547. "successors don't match the actual successors!", MBB);
  548. }
  549. if (MBB->empty()) {
  550. report("MBB exits via conditional branch/fall-through but doesn't "
  551. "contain any instructions!", MBB);
  552. } else if (getBundleStart(&MBB->back())->isBarrier()) {
  553. report("MBB exits via conditional branch/fall-through but ends with a "
  554. "barrier instruction!", MBB);
  555. } else if (!getBundleStart(&MBB->back())->isTerminator()) {
  556. report("MBB exits via conditional branch/fall-through but the branch "
  557. "isn't a terminator instruction!", MBB);
  558. }
  559. } else if (TBB && FBB) {
  560. // Block conditionally branches somewhere, otherwise branches
  561. // somewhere else.
  562. if (MBB->succ_size() == 1) {
  563. // A conditional branch with only one successor is weird, but allowed.
  564. if (FBB != TBB)
  565. report("MBB exits via conditional branch/branch through but only has "
  566. "one CFG successor!", MBB);
  567. else if (TBB != *MBB->succ_begin())
  568. report("MBB exits via conditional branch/branch through but the CFG "
  569. "successor don't match the actual successor!", MBB);
  570. } else if (MBB->succ_size() != 2) {
  571. report("MBB exits via conditional branch/branch but doesn't have "
  572. "exactly two CFG successors!", MBB);
  573. } else if (!matchPair(MBB->succ_begin(), TBB, FBB)) {
  574. report("MBB exits via conditional branch/branch but the CFG "
  575. "successors don't match the actual successors!", MBB);
  576. }
  577. if (MBB->empty()) {
  578. report("MBB exits via conditional branch/branch but doesn't "
  579. "contain any instructions!", MBB);
  580. } else if (!getBundleStart(&MBB->back())->isBarrier()) {
  581. report("MBB exits via conditional branch/branch but doesn't end with a "
  582. "barrier instruction!", MBB);
  583. } else if (!getBundleStart(&MBB->back())->isTerminator()) {
  584. report("MBB exits via conditional branch/branch but the branch "
  585. "isn't a terminator instruction!", MBB);
  586. }
  587. if (Cond.empty()) {
  588. report("MBB exits via conditinal branch/branch but there's no "
  589. "condition!", MBB);
  590. }
  591. } else {
  592. report("AnalyzeBranch returned invalid data!", MBB);
  593. }
  594. }
  595. regsLive.clear();
  596. for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(),
  597. E = MBB->livein_end(); I != E; ++I) {
  598. if (!TargetRegisterInfo::isPhysicalRegister(*I)) {
  599. report("MBB live-in list contains non-physical register", MBB);
  600. continue;
  601. }
  602. for (MCSubRegIterator SubRegs(*I, TRI, /*IncludeSelf=*/true);
  603. SubRegs.isValid(); ++SubRegs)
  604. regsLive.insert(*SubRegs);
  605. }
  606. regsLiveInButUnused = regsLive;
  607. const MachineFrameInfo *MFI = MF->getFrameInfo();
  608. assert(MFI && "Function has no frame info");
  609. BitVector PR = MFI->getPristineRegs(MBB);
  610. for (int I = PR.find_first(); I>0; I = PR.find_next(I)) {
  611. for (MCSubRegIterator SubRegs(I, TRI, /*IncludeSelf=*/true);
  612. SubRegs.isValid(); ++SubRegs)
  613. regsLive.insert(*SubRegs);
  614. }
  615. regsKilled.clear();
  616. regsDefined.clear();
  617. if (Indexes)
  618. lastIndex = Indexes->getMBBStartIdx(MBB);
  619. }
  620. // This function gets called for all bundle headers, including normal
  621. // stand-alone unbundled instructions.
  622. void MachineVerifier::visitMachineBundleBefore(const MachineInstr *MI) {
  623. if (Indexes && Indexes->hasIndex(MI)) {
  624. SlotIndex idx = Indexes->getInstructionIndex(MI);
  625. if (!(idx > lastIndex)) {
  626. report("Instruction index out of order", MI);
  627. *OS << "Last instruction was at " << lastIndex << '\n';
  628. }
  629. lastIndex = idx;
  630. }
  631. // Ensure non-terminators don't follow terminators.
  632. // Ignore predicated terminators formed by if conversion.
  633. // FIXME: If conversion shouldn't need to violate this rule.
  634. if (MI->isTerminator() && !TII->isPredicated(MI)) {
  635. if (!FirstTerminator)
  636. FirstTerminator = MI;
  637. } else if (FirstTerminator) {
  638. report("Non-terminator instruction after the first terminator", MI);
  639. *OS << "First terminator was:\t" << *FirstTerminator;
  640. }
  641. }
  642. // The operands on an INLINEASM instruction must follow a template.
  643. // Verify that the flag operands make sense.
  644. void MachineVerifier::verifyInlineAsm(const MachineInstr *MI) {
  645. // The first two operands on INLINEASM are the asm string and global flags.
  646. if (MI->getNumOperands() < 2) {
  647. report("Too few operands on inline asm", MI);
  648. return;
  649. }
  650. if (!MI->getOperand(0).isSymbol())
  651. report("Asm string must be an external symbol", MI);
  652. if (!MI->getOperand(1).isImm())
  653. report("Asm flags must be an immediate", MI);
  654. // Allowed flags are Extra_HasSideEffects = 1, Extra_IsAlignStack = 2,
  655. // Extra_AsmDialect = 4, Extra_MayLoad = 8, and Extra_MayStore = 16.
  656. if (!isUInt<5>(MI->getOperand(1).getImm()))
  657. report("Unknown asm flags", &MI->getOperand(1), 1);
  658. assert(InlineAsm::MIOp_FirstOperand == 2 && "Asm format changed");
  659. unsigned OpNo = InlineAsm::MIOp_FirstOperand;
  660. unsigned NumOps;
  661. for (unsigned e = MI->getNumOperands(); OpNo < e; OpNo += NumOps) {
  662. const MachineOperand &MO = MI->getOperand(OpNo);
  663. // There may be implicit ops after the fixed operands.
  664. if (!MO.isImm())
  665. break;
  666. NumOps = 1 + InlineAsm::getNumOperandRegisters(MO.getImm());
  667. }
  668. if (OpNo > MI->getNumOperands())
  669. report("Missing operands in last group", MI);
  670. // An optional MDNode follows the groups.
  671. if (OpNo < MI->getNumOperands() && MI->getOperand(OpNo).isMetadata())
  672. ++OpNo;
  673. // All trailing operands must be implicit registers.
  674. for (unsigned e = MI->getNumOperands(); OpNo < e; ++OpNo) {
  675. const MachineOperand &MO = MI->getOperand(OpNo);
  676. if (!MO.isReg() || !MO.isImplicit())
  677. report("Expected implicit register after groups", &MO, OpNo);
  678. }
  679. }
  680. void MachineVerifier::visitMachineInstrBefore(const MachineInstr *MI) {
  681. const MCInstrDesc &MCID = MI->getDesc();
  682. if (MI->getNumOperands() < MCID.getNumOperands()) {
  683. report("Too few operands", MI);
  684. *OS << MCID.getNumOperands() << " operands expected, but "
  685. << MI->getNumOperands() << " given.\n";
  686. }
  687. // Check the tied operands.
  688. if (MI->isInlineAsm())
  689. verifyInlineAsm(MI);
  690. // Check the MachineMemOperands for basic consistency.
  691. for (MachineInstr::mmo_iterator I = MI->memoperands_begin(),
  692. E = MI->memoperands_end(); I != E; ++I) {
  693. if ((*I)->isLoad() && !MI->mayLoad())
  694. report("Missing mayLoad flag", MI);
  695. if ((*I)->isStore() && !MI->mayStore())
  696. report("Missing mayStore flag", MI);
  697. }
  698. // Debug values must not have a slot index.
  699. // Other instructions must have one, unless they are inside a bundle.
  700. if (LiveInts) {
  701. bool mapped = !LiveInts->isNotInMIMap(MI);
  702. if (MI->isDebugValue()) {
  703. if (mapped)
  704. report("Debug instruction has a slot index", MI);
  705. } else if (MI->isInsideBundle()) {
  706. if (mapped)
  707. report("Instruction inside bundle has a slot index", MI);
  708. } else {
  709. if (!mapped)
  710. report("Missing slot index", MI);
  711. }
  712. }
  713. StringRef ErrorInfo;
  714. if (!TII->verifyInstruction(MI, ErrorInfo))
  715. report(ErrorInfo.data(), MI);
  716. }
  717. void
  718. MachineVerifier::visitMachineOperand(const MachineOperand *MO, unsigned MONum) {
  719. const MachineInstr *MI = MO->getParent();
  720. const MCInstrDesc &MCID = MI->getDesc();
  721. // The first MCID.NumDefs operands must be explicit register defines
  722. if (MONum < MCID.getNumDefs()) {
  723. const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
  724. if (!MO->isReg())
  725. report("Explicit definition must be a register", MO, MONum);
  726. else if (!MO->isDef() && !MCOI.isOptionalDef())
  727. report("Explicit definition marked as use", MO, MONum);
  728. else if (MO->isImplicit())
  729. report("Explicit definition marked as implicit", MO, MONum);
  730. } else if (MONum < MCID.getNumOperands()) {
  731. const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
  732. // Don't check if it's the last operand in a variadic instruction. See,
  733. // e.g., LDM_RET in the arm back end.
  734. if (MO->isReg() &&
  735. !(MI->isVariadic() && MONum == MCID.getNumOperands()-1)) {
  736. if (MO->isDef() && !MCOI.isOptionalDef())
  737. report("Explicit operand marked as def", MO, MONum);
  738. if (MO->isImplicit())
  739. report("Explicit operand marked as implicit", MO, MONum);
  740. }
  741. int TiedTo = MCID.getOperandConstraint(MONum, MCOI::TIED_TO);
  742. if (TiedTo != -1) {
  743. if (!MO->isReg())
  744. report("Tied use must be a register", MO, MONum);
  745. else if (!MO->isTied())
  746. report("Operand should be tied", MO, MONum);
  747. else if (unsigned(TiedTo) != MI->findTiedOperandIdx(MONum))
  748. report("Tied def doesn't match MCInstrDesc", MO, MONum);
  749. } else if (MO->isReg() && MO->isTied())
  750. report("Explicit operand should not be tied", MO, MONum);
  751. } else {
  752. // ARM adds %reg0 operands to indicate predicates. We'll allow that.
  753. if (MO->isReg() && !MO->isImplicit() && !MI->isVariadic() && MO->getReg())
  754. report("Extra explicit operand on non-variadic instruction", MO, MONum);
  755. }
  756. switch (MO->getType()) {
  757. case MachineOperand::MO_Register: {
  758. const unsigned Reg = MO->getReg();
  759. if (!Reg)
  760. return;
  761. if (MRI->tracksLiveness() && !MI->isDebugValue())
  762. checkLiveness(MO, MONum);
  763. // Verify the consistency of tied operands.
  764. if (MO->isTied()) {
  765. unsigned OtherIdx = MI->findTiedOperandIdx(MONum);
  766. const MachineOperand &OtherMO = MI->getOperand(OtherIdx);
  767. if (!OtherMO.isReg())
  768. report("Must be tied to a register", MO, MONum);
  769. if (!OtherMO.isTied())
  770. report("Missing tie flags on tied operand", MO, MONum);
  771. if (MI->findTiedOperandIdx(OtherIdx) != MONum)
  772. report("Inconsistent tie links", MO, MONum);
  773. if (MONum < MCID.getNumDefs()) {
  774. if (OtherIdx < MCID.getNumOperands()) {
  775. if (-1 == MCID.getOperandConstraint(OtherIdx, MCOI::TIED_TO))
  776. report("Explicit def tied to explicit use without tie constraint",
  777. MO, MONum);
  778. } else {
  779. if (!OtherMO.isImplicit())
  780. report("Explicit def should be tied to implicit use", MO, MONum);
  781. }
  782. }
  783. }
  784. // Verify two-address constraints after leaving SSA form.
  785. unsigned DefIdx;
  786. if (!MRI->isSSA() && MO->isUse() &&
  787. MI->isRegTiedToDefOperand(MONum, &DefIdx) &&
  788. Reg != MI->getOperand(DefIdx).getReg())
  789. report("Two-address instruction operands must be identical", MO, MONum);
  790. // Check register classes.
  791. if (MONum < MCID.getNumOperands() && !MO->isImplicit()) {
  792. unsigned SubIdx = MO->getSubReg();
  793. if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
  794. if (SubIdx) {
  795. report("Illegal subregister index for physical register", MO, MONum);
  796. return;
  797. }
  798. if (const TargetRegisterClass *DRC =
  799. TII->getRegClass(MCID, MONum, TRI, *MF)) {
  800. if (!DRC->contains(Reg)) {
  801. report("Illegal physical register for instruction", MO, MONum);
  802. *OS << TRI->getName(Reg) << " is not a "
  803. << DRC->getName() << " register.\n";
  804. }
  805. }
  806. } else {
  807. // Virtual register.
  808. const TargetRegisterClass *RC = MRI->getRegClass(Reg);
  809. if (SubIdx) {
  810. const TargetRegisterClass *SRC =
  811. TRI->getSubClassWithSubReg(RC, SubIdx);
  812. if (!SRC) {
  813. report("Invalid subregister index for virtual register", MO, MONum);
  814. *OS << "Register class " << RC->getName()
  815. << " does not support subreg index " << SubIdx << "\n";
  816. return;
  817. }
  818. if (RC != SRC) {
  819. report("Invalid register class for subregister index", MO, MONum);
  820. *OS << "Register class " << RC->getName()
  821. << " does not fully support subreg index " << SubIdx << "\n";
  822. return;
  823. }
  824. }
  825. if (const TargetRegisterClass *DRC =
  826. TII->getRegClass(MCID, MONum, TRI, *MF)) {
  827. if (SubIdx) {
  828. const TargetRegisterClass *SuperRC =
  829. TRI->getLargestLegalSuperClass(RC);
  830. if (!SuperRC) {
  831. report("No largest legal super class exists.", MO, MONum);
  832. return;
  833. }
  834. DRC = TRI->getMatchingSuperRegClass(SuperRC, DRC, SubIdx);
  835. if (!DRC) {
  836. report("No matching super-reg register class.", MO, MONum);
  837. return;
  838. }
  839. }
  840. if (!RC->hasSuperClassEq(DRC)) {
  841. report("Illegal virtual register for instruction", MO, MONum);
  842. *OS << "Expected a " << DRC->getName() << " register, but got a "
  843. << RC->getName() << " register\n";
  844. }
  845. }
  846. }
  847. }
  848. break;
  849. }
  850. case MachineOperand::MO_RegisterMask:
  851. regMasks.push_back(MO->getRegMask());
  852. break;
  853. case MachineOperand::MO_MachineBasicBlock:
  854. if (MI->isPHI() && !MO->getMBB()->isSuccessor(MI->getParent()))
  855. report("PHI operand is not in the CFG", MO, MONum);
  856. break;
  857. case MachineOperand::MO_FrameIndex:
  858. if (LiveStks && LiveStks->hasInterval(MO->getIndex()) &&
  859. LiveInts && !LiveInts->isNotInMIMap(MI)) {
  860. LiveInterval &LI = LiveStks->getInterval(MO->getIndex());
  861. SlotIndex Idx = LiveInts->getInstructionIndex(MI);
  862. if (MI->mayLoad() && !LI.liveAt(Idx.getRegSlot(true))) {
  863. report("Instruction loads from dead spill slot", MO, MONum);
  864. *OS << "Live stack: " << LI << '\n';
  865. }
  866. if (MI->mayStore() && !LI.liveAt(Idx.getRegSlot())) {
  867. report("Instruction stores to dead spill slot", MO, MONum);
  868. *OS << "Live stack: " << LI << '\n';
  869. }
  870. }
  871. break;
  872. default:
  873. break;
  874. }
  875. }
  876. void MachineVerifier::checkLiveness(const MachineOperand *MO, unsigned MONum) {
  877. const MachineInstr *MI = MO->getParent();
  878. const unsigned Reg = MO->getReg();
  879. // Both use and def operands can read a register.
  880. if (MO->readsReg()) {
  881. regsLiveInButUnused.erase(Reg);
  882. if (MO->isKill())
  883. addRegWithSubRegs(regsKilled, Reg);
  884. // Check that LiveVars knows this kill.
  885. if (LiveVars && TargetRegisterInfo::isVirtualRegister(Reg) &&
  886. MO->isKill()) {
  887. LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
  888. if (std::find(VI.Kills.begin(), VI.Kills.end(), MI) == VI.Kills.end())
  889. report("Kill missing from LiveVariables", MO, MONum);
  890. }
  891. // Check LiveInts liveness and kill.
  892. if (LiveInts && !LiveInts->isNotInMIMap(MI)) {
  893. SlotIndex UseIdx = LiveInts->getInstructionIndex(MI);
  894. // Check the cached regunit intervals.
  895. if (TargetRegisterInfo::isPhysicalRegister(Reg) && !isReserved(Reg)) {
  896. for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
  897. if (const LiveRange *LR = LiveInts->getCachedRegUnit(*Units)) {
  898. LiveQueryResult LRQ = LR->Query(UseIdx);
  899. if (!LRQ.valueIn()) {
  900. report("No live segment at use", MO, MONum);
  901. *OS << UseIdx << " is not live in " << PrintRegUnit(*Units, TRI)
  902. << ' ' << *LR << '\n';
  903. }
  904. if (MO->isKill() && !LRQ.isKill()) {
  905. report("Live range continues after kill flag", MO, MONum);
  906. *OS << PrintRegUnit(*Units, TRI) << ' ' << *LR << '\n';
  907. }
  908. }
  909. }
  910. }
  911. if (TargetRegisterInfo::isVirtualRegister(Reg)) {
  912. if (LiveInts->hasInterval(Reg)) {
  913. // This is a virtual register interval.
  914. const LiveInterval &LI = LiveInts->getInterval(Reg);
  915. LiveQueryResult LRQ = LI.Query(UseIdx);
  916. if (!LRQ.valueIn()) {
  917. report("No live segment at use", MO, MONum);
  918. *OS << UseIdx << " is not live in " << LI << '\n';
  919. }
  920. // Check for extra kill flags.
  921. // Note that we allow missing kill flags for now.
  922. if (MO->isKill() && !LRQ.isKill()) {
  923. report("Live range continues after kill flag", MO, MONum);
  924. *OS << "Live range: " << LI << '\n';
  925. }
  926. } else {
  927. report("Virtual register has no live interval", MO, MONum);
  928. }
  929. }
  930. }
  931. // Use of a dead register.
  932. if (!regsLive.count(Reg)) {
  933. if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
  934. // Reserved registers may be used even when 'dead'.
  935. if (!isReserved(Reg))
  936. report("Using an undefined physical register", MO, MONum);
  937. } else if (MRI->def_empty(Reg)) {
  938. report("Reading virtual register without a def", MO, MONum);
  939. } else {
  940. BBInfo &MInfo = MBBInfoMap[MI->getParent()];
  941. // We don't know which virtual registers are live in, so only complain
  942. // if vreg was killed in this MBB. Otherwise keep track of vregs that
  943. // must be live in. PHI instructions are handled separately.
  944. if (MInfo.regsKilled.count(Reg))
  945. report("Using a killed virtual register", MO, MONum);
  946. else if (!MI->isPHI())
  947. MInfo.vregsLiveIn.insert(std::make_pair(Reg, MI));
  948. }
  949. }
  950. }
  951. if (MO->isDef()) {
  952. // Register defined.
  953. // TODO: verify that earlyclobber ops are not used.
  954. if (MO->isDead())
  955. addRegWithSubRegs(regsDead, Reg);
  956. else
  957. addRegWithSubRegs(regsDefined, Reg);
  958. // Verify SSA form.
  959. if (MRI->isSSA() && TargetRegisterInfo::isVirtualRegister(Reg) &&
  960. llvm::next(MRI->def_begin(Reg)) != MRI->def_end())
  961. report("Multiple virtual register defs in SSA form", MO, MONum);
  962. // Check LiveInts for a live segment, but only for virtual registers.
  963. if (LiveInts && TargetRegisterInfo::isVirtualRegister(Reg) &&
  964. !LiveInts->isNotInMIMap(MI)) {
  965. SlotIndex DefIdx = LiveInts->getInstructionIndex(MI);
  966. DefIdx = DefIdx.getRegSlot(MO->isEarlyClobber());
  967. if (LiveInts->hasInterval(Reg)) {
  968. const LiveInterval &LI = LiveInts->getInterval(Reg);
  969. if (const VNInfo *VNI = LI.getVNInfoAt(DefIdx)) {
  970. assert(VNI && "NULL valno is not allowed");
  971. if (VNI->def != DefIdx) {
  972. report("Inconsistent valno->def", MO, MONum);
  973. *OS << "Valno " << VNI->id << " is not defined at "
  974. << DefIdx << " in " << LI << '\n';
  975. }
  976. } else {
  977. report("No live segment at def", MO, MONum);
  978. *OS << DefIdx << " is not live in " << LI << '\n';
  979. }
  980. // Check that, if the dead def flag is present, LiveInts agree.
  981. if (MO->isDead()) {
  982. LiveQueryResult LRQ = LI.Query(DefIdx);
  983. if (!LRQ.isDeadDef()) {
  984. report("Live range continues after dead def flag", MO, MONum);
  985. *OS << "Live range: " << LI << '\n';
  986. }
  987. }
  988. } else {
  989. report("Virtual register has no Live interval", MO, MONum);
  990. }
  991. }
  992. }
  993. }
  994. void MachineVerifier::visitMachineInstrAfter(const MachineInstr *MI) {
  995. }
  996. // This function gets called after visiting all instructions in a bundle. The
  997. // argument points to the bundle header.
  998. // Normal stand-alone instructions are also considered 'bundles', and this
  999. // function is called for all of them.
  1000. void MachineVerifier::visitMachineBundleAfter(const MachineInstr *MI) {
  1001. BBInfo &MInfo = MBBInfoMap[MI->getParent()];
  1002. set_union(MInfo.regsKilled, regsKilled);
  1003. set_subtract(regsLive, regsKilled); regsKilled.clear();
  1004. // Kill any masked registers.
  1005. while (!regMasks.empty()) {
  1006. const uint32_t *Mask = regMasks.pop_back_val();
  1007. for (RegSet::iterator I = regsLive.begin(), E = regsLive.end(); I != E; ++I)
  1008. if (TargetRegisterInfo::isPhysicalRegister(*I) &&
  1009. MachineOperand::clobbersPhysReg(Mask, *I))
  1010. regsDead.push_back(*I);
  1011. }
  1012. set_subtract(regsLive, regsDead); regsDead.clear();
  1013. set_union(regsLive, regsDefined); regsDefined.clear();
  1014. }
  1015. void
  1016. MachineVerifier::visitMachineBasicBlockAfter(const MachineBasicBlock *MBB) {
  1017. MBBInfoMap[MBB].regsLiveOut = regsLive;
  1018. regsLive.clear();
  1019. if (Indexes) {
  1020. SlotIndex stop = Indexes->getMBBEndIdx(MBB);
  1021. if (!(stop > lastIndex)) {
  1022. report("Block ends before last instruction index", MBB);
  1023. *OS << "Block ends at " << stop
  1024. << " last instruction was at " << lastIndex << '\n';
  1025. }
  1026. lastIndex = stop;
  1027. }
  1028. }
  1029. // Calculate the largest possible vregsPassed sets. These are the registers that
  1030. // can pass through an MBB live, but may not be live every time. It is assumed
  1031. // that all vregsPassed sets are empty before the call.
  1032. void MachineVerifier::calcRegsPassed() {
  1033. // First push live-out regs to successors' vregsPassed. Remember the MBBs that
  1034. // have any vregsPassed.
  1035. SmallPtrSet<const MachineBasicBlock*, 8> todo;
  1036. for (MachineFunction::const_iterator MFI = MF->begin(), MFE = MF->end();
  1037. MFI != MFE; ++MFI) {
  1038. const MachineBasicBlock &MBB(*MFI);
  1039. BBInfo &MInfo = MBBInfoMap[&MBB];
  1040. if (!MInfo.reachable)
  1041. continue;
  1042. for (MachineBasicBlock::const_succ_iterator SuI = MBB.succ_begin(),
  1043. SuE = MBB.succ_end(); SuI != SuE; ++SuI) {
  1044. BBInfo &SInfo = MBBInfoMap[*SuI];
  1045. if (SInfo.addPassed(MInfo.regsLiveOut))
  1046. todo.insert(*SuI);
  1047. }
  1048. }
  1049. // Iteratively push vregsPassed to successors. This will converge to the same
  1050. // final state regardless of DenseSet iteration order.
  1051. while (!todo.empty()) {
  1052. const MachineBasicBlock *MBB = *todo.begin();
  1053. todo.erase(MBB);
  1054. BBInfo &MInfo = MBBInfoMap[MBB];
  1055. for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
  1056. SuE = MBB->succ_end(); SuI != SuE; ++SuI) {
  1057. if (*SuI == MBB)
  1058. continue;
  1059. BBInfo &SInfo = MBBInfoMap[*SuI];
  1060. if (SInfo.addPassed(MInfo.vregsPassed))
  1061. todo.insert(*SuI);
  1062. }
  1063. }
  1064. }
  1065. // Calculate the set of virtual registers that must be passed through each basic
  1066. // block in order to satisfy the requirements of successor blocks. This is very
  1067. // similar to calcRegsPassed, only backwards.
  1068. void MachineVerifier::calcRegsRequired() {
  1069. // First push live-in regs to predecessors' vregsRequired.
  1070. SmallPtrSet<const MachineBasicBlock*, 8> todo;
  1071. for (MachineFunction::const_iterator MFI = MF->begin(), MFE = MF->end();
  1072. MFI != MFE; ++MFI) {
  1073. const MachineBasicBlock &MBB(*MFI);
  1074. BBInfo &MInfo = MBBInfoMap[&MBB];
  1075. for (MachineBasicBlock::const_pred_iterator PrI = MBB.pred_begin(),
  1076. PrE = MBB.pred_end(); PrI != PrE; ++PrI) {
  1077. BBInfo &PInfo = MBBInfoMap[*PrI];
  1078. if (PInfo.addRequired(MInfo.vregsLiveIn))
  1079. todo.insert(*PrI);
  1080. }
  1081. }
  1082. // Iteratively push vregsRequired to predecessors. This will converge to the
  1083. // same final state regardless of DenseSet iteration order.
  1084. while (!todo.empty()) {
  1085. const MachineBasicBlock *MBB = *todo.begin();
  1086. todo.erase(MBB);
  1087. BBInfo &MInfo = MBBInfoMap[MBB];
  1088. for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
  1089. PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
  1090. if (*PrI == MBB)
  1091. continue;
  1092. BBInfo &SInfo = MBBInfoMap[*PrI];
  1093. if (SInfo.addRequired(MInfo.vregsRequired))
  1094. todo.insert(*PrI);
  1095. }
  1096. }
  1097. }
  1098. // Check PHI instructions at the beginning of MBB. It is assumed that
  1099. // calcRegsPassed has been run so BBInfo::isLiveOut is valid.
  1100. void MachineVerifier::checkPHIOps(const MachineBasicBlock *MBB) {
  1101. SmallPtrSet<const MachineBasicBlock*, 8> seen;
  1102. for (MachineBasicBlock::const_iterator BBI = MBB->begin(), BBE = MBB->end();
  1103. BBI != BBE && BBI->isPHI(); ++BBI) {
  1104. seen.clear();
  1105. for (unsigned i = 1, e = BBI->getNumOperands(); i != e; i += 2) {
  1106. unsigned Reg = BBI->getOperand(i).getReg();
  1107. const MachineBasicBlock *Pre = BBI->getOperand(i + 1).getMBB();
  1108. if (!Pre->isSuccessor(MBB))
  1109. continue;
  1110. seen.insert(Pre);
  1111. BBInfo &PrInfo = MBBInfoMap[Pre];
  1112. if (PrInfo.reachable && !PrInfo.isLiveOut(Reg))
  1113. report("PHI operand is not live-out from predecessor",
  1114. &BBI->getOperand(i), i);
  1115. }
  1116. // Did we see all predecessors?
  1117. for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
  1118. PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
  1119. if (!seen.count(*PrI)) {
  1120. report("Missing PHI operand", BBI);
  1121. *OS << "BB#" << (*PrI)->getNumber()
  1122. << " is a predecessor according to the CFG.\n";
  1123. }
  1124. }
  1125. }
  1126. }
  1127. void MachineVerifier::visitMachineFunctionAfter() {
  1128. calcRegsPassed();
  1129. for (MachineFunction::const_iterator MFI = MF->begin(), MFE = MF->end();
  1130. MFI != MFE; ++MFI) {
  1131. BBInfo &MInfo = MBBInfoMap[MFI];
  1132. // Skip unreachable MBBs.
  1133. if (!MInfo.reachable)
  1134. continue;
  1135. checkPHIOps(MFI);
  1136. }
  1137. // Now check liveness info if available
  1138. calcRegsRequired();
  1139. // Check for killed virtual registers that should be live out.
  1140. for (MachineFunction::const_iterator MFI = MF->begin(), MFE = MF->end();
  1141. MFI != MFE; ++MFI) {
  1142. BBInfo &MInfo = MBBInfoMap[MFI];
  1143. for (RegSet::iterator
  1144. I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end(); I != E;
  1145. ++I)
  1146. if (MInfo.regsKilled.count(*I)) {
  1147. report("Virtual register killed in block, but needed live out.", MFI);
  1148. *OS << "Virtual register " << PrintReg(*I)
  1149. << " is used after the block.\n";
  1150. }
  1151. }
  1152. if (!MF->empty()) {
  1153. BBInfo &MInfo = MBBInfoMap[&MF->front()];
  1154. for (RegSet::iterator
  1155. I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end(); I != E;
  1156. ++I)
  1157. report("Virtual register def doesn't dominate all uses.",
  1158. MRI->getVRegDef(*I));
  1159. }
  1160. if (LiveVars)
  1161. verifyLiveVariables();
  1162. if (LiveInts)
  1163. verifyLiveIntervals();
  1164. }
  1165. void MachineVerifier::verifyLiveVariables() {
  1166. assert(LiveVars && "Don't call verifyLiveVariables without LiveVars");
  1167. for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
  1168. unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
  1169. LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
  1170. for (MachineFunction::const_iterator MFI = MF->begin(), MFE = MF->end();
  1171. MFI != MFE; ++MFI) {
  1172. BBInfo &MInfo = MBBInfoMap[MFI];
  1173. // Our vregsRequired should be identical to LiveVariables' AliveBlocks
  1174. if (MInfo.vregsRequired.count(Reg)) {
  1175. if (!VI.AliveBlocks.test(MFI->getNumber())) {
  1176. report("LiveVariables: Block missing from AliveBlocks", MFI);
  1177. *OS << "Virtual register " << PrintReg(Reg)
  1178. << " must be live through the block.\n";
  1179. }
  1180. } else {
  1181. if (VI.AliveBlocks.test(MFI->getNumber())) {
  1182. report("LiveVariables: Block should not be in AliveBlocks", MFI);
  1183. *OS << "Virtual register " << PrintReg(Reg)
  1184. << " is not needed live through the block.\n";
  1185. }
  1186. }
  1187. }
  1188. }
  1189. }
  1190. void MachineVerifier::verifyLiveIntervals() {
  1191. assert(LiveInts && "Don't call verifyLiveIntervals without LiveInts");
  1192. for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
  1193. unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
  1194. // Spilling and splitting may leave unused registers around. Skip them.
  1195. if (MRI->reg_nodbg_empty(Reg))
  1196. continue;
  1197. if (!LiveInts->hasInterval(Reg)) {
  1198. report("Missing live interval for virtual register", MF);
  1199. *OS << PrintReg(Reg, TRI) << " still has defs or uses\n";
  1200. continue;
  1201. }
  1202. const LiveInterval &LI = LiveInts->getInterval(Reg);
  1203. assert(Reg == LI.reg && "Invalid reg to interval mapping");
  1204. verifyLiveInterval(LI);
  1205. }
  1206. // Verify all the cached regunit intervals.
  1207. for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
  1208. if (const LiveRange *LR = LiveInts->getCachedRegUnit(i))
  1209. verifyLiveRange(*LR, i);
  1210. }
  1211. void MachineVerifier::verifyLiveRangeValue(const LiveRange &LR,
  1212. const VNInfo *VNI,
  1213. unsigned Reg) {
  1214. if (VNI->isUnused())
  1215. return;
  1216. const VNInfo *DefVNI = LR.getVNInfoAt(VNI->def);
  1217. if (!DefVNI) {
  1218. report("Valno not live at def and not marked unused", MF, LR);
  1219. *OS << "Valno #" << VNI->id << '\n';
  1220. return;
  1221. }
  1222. if (DefVNI != VNI) {
  1223. report("Live segment at def has different valno", MF, LR);
  1224. *OS << "Valno #" << VNI->id << " is defined at " << VNI->def
  1225. << " where valno #" << DefVNI->id << " is live\n";
  1226. return;
  1227. }
  1228. const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(VNI->def);
  1229. if (!MBB) {
  1230. report("Invalid definition index", MF, LR);
  1231. *OS << "Valno #" << VNI->id << " is defined at " << VNI->def
  1232. << " in " << LR << '\n';
  1233. return;
  1234. }
  1235. if (VNI->isPHIDef()) {
  1236. if (VNI->def != LiveInts->getMBBStartIdx(MBB)) {
  1237. report("PHIDef value is not defined at MBB start", MBB, LR);
  1238. *OS << "Valno #" << VNI->id << " is defined at " << VNI->def
  1239. << ", not at the beginning of BB#" << MBB->getNumber() << '\n';
  1240. }
  1241. return;
  1242. }
  1243. // Non-PHI def.
  1244. const MachineInstr *MI = LiveInts->getInstructionFromIndex(VNI->def);
  1245. if (!MI) {
  1246. report("No instruction at def index", MBB, LR);
  1247. *OS << "Valno #" << VNI->id << " is defined at " << VNI->def << '\n';
  1248. return;
  1249. }
  1250. if (Reg != 0) {
  1251. bool hasDef = false;
  1252. bool isEarlyClobber = false;
  1253. for (ConstMIBundleOperands MOI(MI); MOI.isValid(); ++MOI) {
  1254. if (!MOI->isReg() || !MOI->isDef())
  1255. continue;
  1256. if (TargetRegisterInfo::isVirtualRegister(Reg)) {
  1257. if (MOI->getReg() != Reg)
  1258. continue;
  1259. } else {
  1260. if (!TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) ||
  1261. !TRI->hasRegUnit(MOI->getReg(), Reg))
  1262. continue;
  1263. }
  1264. hasDef = true;
  1265. if (MOI->isEarlyClobber())
  1266. isEarlyClobber = true;
  1267. }
  1268. if (!hasDef) {
  1269. report("Defining instruction does not modify register", MI);
  1270. *OS << "Valno #" << VNI->id << " in " << LR << '\n';
  1271. }
  1272. // Early clobber defs begin at USE slots, but other defs must begin at
  1273. // DEF slots.
  1274. if (isEarlyClobber) {
  1275. if (!VNI->def.isEarlyClobber()) {
  1276. report("Early clobber def must be at an early-clobber slot", MBB, LR);
  1277. *OS << "Valno #" << VNI->id << " is defined at " << VNI->def << '\n';
  1278. }
  1279. } else if (!VNI->def.isRegister()) {
  1280. report("Non-PHI, non-early clobber def must be at a register slot",
  1281. MBB, LR);
  1282. *OS << "Valno #" << VNI->id << " is defined at " << VNI->def << '\n';
  1283. }
  1284. }
  1285. }
  1286. void MachineVerifier::verifyLiveRangeSegment(const LiveRange &LR,
  1287. const LiveRange::const_iterator I,
  1288. unsigned Reg) {
  1289. const LiveRange::Segment &S = *I;
  1290. const VNInfo *VNI = S.valno;
  1291. assert(VNI && "Live segment has no valno");
  1292. if (VNI->id >= LR.getNumValNums() || VNI != LR.getValNumInfo(VNI->id)) {
  1293. report("Foreign valno in live segment", MF, LR);
  1294. *OS << S << " has a bad valno\n";
  1295. }
  1296. if (VNI->isUnused()) {
  1297. report("Live segment valno is marked unused", MF, LR);
  1298. *OS << S << '\n';
  1299. }
  1300. const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(S.start);
  1301. if (!MBB) {
  1302. report("Bad start of live segment, no basic block", MF, LR);
  1303. *OS << S << '\n';
  1304. return;
  1305. }
  1306. SlotIndex MBBStartIdx = LiveInts->getMBBStartIdx(MBB);
  1307. if (S.start != MBBStartIdx && S.start != VNI->def) {
  1308. report("Live segment must begin at MBB entry or valno def", MBB, LR);
  1309. *OS << S << '\n';
  1310. }
  1311. const MachineBasicBlock *EndMBB =
  1312. LiveInts->getMBBFromIndex(S.end.getPrevSlot());
  1313. if (!EndMBB) {
  1314. report("Bad end of live segment, no basic block", MF, LR);
  1315. *OS << S << '\n';
  1316. return;
  1317. }
  1318. // No more checks for live-out segments.
  1319. if (S.end == LiveInts->getMBBEndIdx(EndMBB))
  1320. return;
  1321. // RegUnit intervals are allowed dead phis.
  1322. if (!TargetRegisterInfo::isVirtualRegister(Reg) && VNI->isPHIDef() &&
  1323. S.start == VNI->def && S.end == VNI->def.getDeadSlot())
  1324. return;

  // The live segment is ending inside EndMBB.
  const MachineInstr *MI =
    LiveInts->getInstructionFromIndex(S.end.getPrevSlot());
  if (!MI) {
    report("Live segment doesn't end at a valid instruction", EndMBB, LR);
    *OS << S << '\n';
    return;
  }
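
  // SlotIndexes allocates four slots per instruction, in order: B (block
  // boundary), e (early-clobber), r (register), and d (dead). The checks
  // below verify that S.end uses the slot kind implied by the way the value
  // dies or is redefined at MI.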
  // A block (B) slot refers to a basic block boundary, so a segment that ends
  // inside the block must not end on one.
  if (S.end.isBlock()) {
    report("Live segment ends at B slot of an instruction", EndMBB, LR);
    *OS << S << '\n';
  }

  if (S.end.isDead()) {
    // Segment ends on the dead slot.
    // That means there must be a dead def.
    if (!SlotIndex::isSameInstr(S.start, S.end)) {
      report("Live segment ending at dead slot spans instructions", EndMBB, LR);
      *OS << S << '\n';
    }
  }

  // A live segment can only end at an early-clobber slot if it is being
  // redefined by an early-clobber def.
  if (S.end.isEarlyClobber()) {
    if (I+1 == LR.end() || (I+1)->start != S.end) {
      report("Live segment ending at early clobber slot must be "
             "redefined by an EC def in the same instruction", EndMBB, LR);
      *OS << S << '\n';
    }
  }

  // The following checks only apply to virtual registers. Physreg liveness
  // is too weird to check.
  if (TargetRegisterInfo::isVirtualRegister(Reg)) {
    // A live segment can end with either a redefinition, a kill flag on a
    // use, or a dead flag on a def.
    bool hasRead = false;
    for (ConstMIBundleOperands MOI(MI); MOI.isValid(); ++MOI) {
      if (!MOI->isReg() || MOI->getReg() != Reg)
        continue;
      if (MOI->readsReg())
        hasRead = true;
    }
    if (!S.end.isDead()) {
      if (!hasRead) {
        report("Instruction ending live segment doesn't read the register", MI);
        *OS << S << " in " << LR << '\n';
      }
    }
  }

  // Now check all the basic blocks in this live segment.
  MachineFunction::const_iterator MFI = MBB;
  // Is this live segment the beginning of a non-PHIDef VN?
  if (S.start == VNI->def && !VNI->isPHIDef()) {
    // Not live-in to any blocks.
    if (MBB == EndMBB)
      return;
    // Skip this block.
    ++MFI;
  }
  for (;;) {
    assert(LiveInts->isLiveInToMBB(LR, MFI));
    // We don't know how to track physregs into a landing pad.
    if (!TargetRegisterInfo::isVirtualRegister(Reg) &&
        MFI->isLandingPad()) {
      if (&*MFI == EndMBB)
        break;
      ++MFI;
      continue;
    }

    // Is VNI a PHI-def in the current block?
    bool IsPHI = VNI->isPHIDef() &&
      VNI->def == LiveInts->getMBBStartIdx(MFI);

    // Check that VNI is live-out of all predecessors.
    for (MachineBasicBlock::const_pred_iterator PI = MFI->pred_begin(),
         PE = MFI->pred_end(); PI != PE; ++PI) {
      SlotIndex PEnd = LiveInts->getMBBEndIdx(*PI);
      const VNInfo *PVNI = LR.getVNInfoBefore(PEnd);

      // All predecessors must have a live-out value.
      if (!PVNI) {
        report("Register not marked live out of predecessor", *PI, LR);
        *OS << "Valno #" << VNI->id << " live into BB#" << MFI->getNumber()
            << '@' << LiveInts->getMBBStartIdx(MFI) << ", not live before "
            << PEnd << '\n';
        continue;
      }

      // Only PHI-defs can take different predecessor values.
      if (!IsPHI && PVNI != VNI) {
        report("Different value live out of predecessor", *PI, LR);
        *OS << "Valno #" << PVNI->id << " live out of BB#"
            << (*PI)->getNumber() << '@' << PEnd
            << "\nValno #" << VNI->id << " live into BB#" << MFI->getNumber()
            << '@' << LiveInts->getMBBStartIdx(MFI) << '\n';
      }
    }

    if (&*MFI == EndMBB)
      break;
    ++MFI;
  }
}
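
/// Run the value number and segment checks over every VNInfo and every
/// segment of LR. Reg is the virtual register the range belongs to, or a
/// register unit for physical register ranges.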
void MachineVerifier::verifyLiveRange(const LiveRange &LR, unsigned Reg) {
  for (LiveRange::const_vni_iterator I = LR.vni_begin(), E = LR.vni_end();
       I != E; ++I)
    verifyLiveRangeValue(LR, *I, Reg);

  for (LiveRange::const_iterator I = LR.begin(), E = LR.end(); I != E; ++I)
    verifyLiveRangeSegment(LR, I, Reg);
}
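
/// Verify a register's live interval. For virtual registers, additionally
/// check that the interval forms a single connected component; values that
/// are not connected by any def-use chain should live in separate virtual
/// registers.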
void MachineVerifier::verifyLiveInterval(const LiveInterval &LI) {
  verifyLiveRange(LI, LI.reg);

  // Check the LI only has one connected component.
  if (TargetRegisterInfo::isVirtualRegister(LI.reg)) {
    ConnectedVNInfoEqClasses ConEQ(*LiveInts);
    unsigned NumComp = ConEQ.Classify(&LI);
    if (NumComp > 1) {
      report("Multiple connected components in live interval", MF, LI);
      for (unsigned comp = 0; comp != NumComp; ++comp) {
        *OS << comp << ": valnos";
        for (LiveInterval::const_vni_iterator I = LI.vni_begin(),
             E = LI.vni_end(); I != E; ++I)
          if (comp == ConEQ.getEqClass(*I))
            *OS << ' ' << (*I)->id;
        *OS << '\n';
      }
    }
  }
}

namespace {
  // Both FrameSetup and FrameDestroy may carry a zero adjustment, so a single
  // signed integer cannot tell the two apart when the value is zero. We
  // therefore track the stack state as an integer adjustment plus a bool that
  // records whether a FrameSetup is currently open.
  struct StackStateOfBB {
    StackStateOfBB() : EntryValue(0), ExitValue(0), EntryIsSetup(false),
      ExitIsSetup(false) { }
    StackStateOfBB(int EntryVal, int ExitVal, bool EntrySetup, bool ExitSetup) :
      EntryValue(EntryVal), ExitValue(ExitVal), EntryIsSetup(EntrySetup),
      ExitIsSetup(ExitSetup) { }
    // Can be negative, which means we are setting up a frame.
    int EntryValue;
    int ExitValue;
    bool EntryIsSetup;
    bool ExitIsSetup;
  };
}

/// Make sure that, on every path through the CFG, a FrameSetup <n> is always
/// followed by a FrameDestroy <n>, that the stack adjustments are identical on
/// all CFG edges into a merge point, and that the frame is fully destroyed at
/// the end of every return block.
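///
/// As an illustration (the sizes here are made up), a block containing
///   FrameSetup 16  ...  FrameDestroy 16
/// moves the tracked (value, isSetup) state from (0, false) to (-16, true)
/// and back to (0, false); a return block must end in the (0, false) state.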
void MachineVerifier::verifyStackFrame() {
  int FrameSetupOpcode = TII->getCallFrameSetupOpcode();
  int FrameDestroyOpcode = TII->getCallFrameDestroyOpcode();

  SmallVector<StackStateOfBB, 8> SPState;
  SPState.resize(MF->getNumBlockIDs());
  SmallPtrSet<const MachineBasicBlock*, 8> Reachable;

  // Visit the MBBs in DFS order.
  for (df_ext_iterator<const MachineFunction*,
                       SmallPtrSet<const MachineBasicBlock*, 8> >
       DFI = df_ext_begin(MF, Reachable), DFE = df_ext_end(MF, Reachable);
       DFI != DFE; ++DFI) {
    const MachineBasicBlock *MBB = *DFI;

    StackStateOfBB BBState;
    // Check the exit state of the DFS stack predecessor.
    if (DFI.getPathLength() >= 2) {
      const MachineBasicBlock *StackPred = DFI.getPath(DFI.getPathLength() - 2);
      assert(Reachable.count(StackPred) &&
             "DFS stack predecessor must already be visited");
      BBState.EntryValue = SPState[StackPred->getNumber()].ExitValue;
      BBState.EntryIsSetup = SPState[StackPred->getNumber()].ExitIsSetup;
      BBState.ExitValue = BBState.EntryValue;
      BBState.ExitIsSetup = BBState.EntryIsSetup;
    }

    // Update stack state by checking contents of MBB.
    for (MachineBasicBlock::const_iterator I = MBB->begin(), E = MBB->end();
         I != E; ++I) {
      if (I->getOpcode() == FrameSetupOpcode) {
        // The first operand of a FrameOpcode should be i32.
        int Size = I->getOperand(0).getImm();
        assert(Size >= 0 &&
               "Value should be non-negative in FrameSetup and FrameDestroy.\n");

        if (BBState.ExitIsSetup)
          report("FrameSetup is after another FrameSetup", I);
        BBState.ExitValue -= Size;
        BBState.ExitIsSetup = true;
      }

      if (I->getOpcode() == FrameDestroyOpcode) {
        // The first operand of a FrameOpcode should be i32.
        int Size = I->getOperand(0).getImm();
        assert(Size >= 0 &&
               "Value should be non-negative in FrameSetup and FrameDestroy.\n");

        if (!BBState.ExitIsSetup)
          report("FrameDestroy is not after a FrameSetup", I);
        int AbsSPAdj = BBState.ExitValue < 0 ? -BBState.ExitValue :
                                               BBState.ExitValue;
        if (BBState.ExitIsSetup && AbsSPAdj != Size) {
          report("FrameDestroy <n> is after FrameSetup <m>", I);
          *OS << "FrameDestroy <" << Size << "> is after FrameSetup <"
              << AbsSPAdj << ">.\n";
        }
        BBState.ExitValue += Size;
        BBState.ExitIsSetup = false;
      }
    }
    SPState[MBB->getNumber()] = BBState;

    // Make sure the exit state of any predecessor is consistent with the entry
    // state.
    for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
         E = MBB->pred_end(); I != E; ++I) {
      if (Reachable.count(*I) &&
          (SPState[(*I)->getNumber()].ExitValue != BBState.EntryValue ||
           SPState[(*I)->getNumber()].ExitIsSetup != BBState.EntryIsSetup)) {
        report("The exit stack state of a predecessor is inconsistent.", MBB);
        *OS << "Predecessor BB#" << (*I)->getNumber() << " has exit state ("
            << SPState[(*I)->getNumber()].ExitValue << ", "
            << SPState[(*I)->getNumber()].ExitIsSetup
            << "), while BB#" << MBB->getNumber() << " has entry state ("
            << BBState.EntryValue << ", " << BBState.EntryIsSetup << ").\n";
      }
    }

    // Make sure the entry state of any successor is consistent with the exit
    // state.
    for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
         E = MBB->succ_end(); I != E; ++I) {
      if (Reachable.count(*I) &&
          (SPState[(*I)->getNumber()].EntryValue != BBState.ExitValue ||
           SPState[(*I)->getNumber()].EntryIsSetup != BBState.ExitIsSetup)) {
        report("The entry stack state of a successor is inconsistent.", MBB);
        *OS << "Successor BB#" << (*I)->getNumber() << " has entry state ("
            << SPState[(*I)->getNumber()].EntryValue << ", "
            << SPState[(*I)->getNumber()].EntryIsSetup
            << "), while BB#" << MBB->getNumber() << " has exit state ("
            << BBState.ExitValue << ", " << BBState.ExitIsSetup << ").\n";
      }
    }

    // Make sure a basic block with return ends with zero stack adjustment.
    if (!MBB->empty() && MBB->back().isReturn()) {
      if (BBState.ExitIsSetup)
        report("A return block ends with a FrameSetup.", MBB);
      if (BBState.ExitValue)
        report("A return block ends with a nonzero stack adjustment.", MBB);
    }
  }
}