LCOV - code coverage report
Current view: top level - lib/Analysis - CaptureTracking.cpp (source / functions) Hit Total Coverage
Test: llvm-toolchain.info Lines: 93 95 97.9 %
Date: 2018-05-20 00:06:23 Functions: 12 17 70.6 %
Legend: Lines: hit not hit

          Line data    Source code
       1             : //===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
       2             : //
       3             : //                     The LLVM Compiler Infrastructure
       4             : //
       5             : // This file is distributed under the University of Illinois Open Source
       6             : // License. See LICENSE.TXT for details.
       7             : //
       8             : //===----------------------------------------------------------------------===//
       9             : //
      10             : // This file contains routines that help determine which pointers are captured.
      11             : // A pointer value is captured if the function makes a copy of any part of the
      12             : // pointer that outlives the call.  Not being captured means, more or less, that
      13             : // the pointer is only dereferenced and not stored in a global.  Returning part
      14             : // of the pointer as the function return value may or may not count as capturing
      15             : // the pointer, depending on the context.
      16             : //
      17             : //===----------------------------------------------------------------------===//
      18             : 
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/OrderedBasicBlock.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include <memory>
      30             : 
      31             : using namespace llvm;
      32             : 
      33      906308 : CaptureTracker::~CaptureTracker() {}
      34             : 
      35     7098093 : bool CaptureTracker::shouldExplore(const Use *U) { return true; }
      36             : 
      37             : namespace {
      38      797094 :   struct SimpleCaptureTracker : public CaptureTracker {
      39             :     explicit SimpleCaptureTracker(bool ReturnCaptures)
      40      797094 :       : ReturnCaptures(ReturnCaptures), Captured(false) {}
      41             : 
      42       20036 :     void tooManyUses() override { Captured = true; }
      43             : 
      44      744078 :     bool captured(const Use *U) override {
      45      744517 :       if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
      46             :         return false;
      47             : 
      48      743666 :       Captured = true;
      49      743666 :       return true;
      50             :     }
      51             : 
      52             :     bool ReturnCaptures;
      53             : 
      54             :     bool Captured;
      55             :   };
      56             : 
  /// Only find pointer captures which happen before the given instruction. Uses
  /// the dominator tree to determine whether one instruction is before another.
  /// Only support the case where the Value is defined in the same basic block
  /// as the given instruction and the use.
  struct CapturesBefore : public CaptureTracker {

    /// \param ReturnCaptures  If false, uses in return instructions do not
    ///                        count as captures.
    /// \param I               The bounding instruction; only uses that may
    ///                        execute before it are considered.
    /// \param DT              Dominator tree of the enclosing function.
    /// \param IncludeI        Whether a use at \p I itself counts.
    /// \param IC              Cache of intra-block instruction ordering for
    ///                        I's block (not owned).
    CapturesBefore(bool ReturnCaptures, const Instruction *I, const DominatorTree *DT,
                   bool IncludeI, OrderedBasicBlock *IC)
      : OrderedBB(IC), BeforeHere(I), DT(DT),
        ReturnCaptures(ReturnCaptures), IncludeI(IncludeI), Captured(false) {}

    // Too many uses to analyze: conservatively report a capture.
    void tooManyUses() override { Captured = true; }

    /// Return true if the use at instruction \p I can be skipped because it
    /// provably cannot execute before (or reach) BeforeHere.
    bool isSafeToPrune(Instruction *I) {
      BasicBlock *BB = I->getParent();
      // We explore this usage only if the usage can reach "BeforeHere".
      // If use is not reachable from entry, there is no need to explore.
      if (BeforeHere != I && !DT->isReachableFromEntry(BB))
        return true;

      // Compute the case where both instructions are inside the same basic
      // block. Since instructions in the same BB as BeforeHere are numbered in
      // 'OrderedBB', avoid using 'dominates' and 'isPotentiallyReachable'
      // which are very expensive for large basic blocks.
      if (BB == BeforeHere->getParent()) {
        // 'I' dominates 'BeforeHere' => not safe to prune.
        //
        // The value defined by an invoke dominates an instruction only
        // if it dominates every instruction in UseBB. A PHI is dominated only
        // if the instruction dominates every possible use in the UseBB. Since
        // UseBB == BB, avoid pruning.
        if (isa<InvokeInst>(BeforeHere) || isa<PHINode>(I) || I == BeforeHere)
          return false;
        if (!OrderedBB->dominates(BeforeHere, I))
          return false;

        // 'BeforeHere' comes before 'I', it's safe to prune if we also
        // guarantee that 'I' never reaches 'BeforeHere' through a back-edge or
        // by its successors, i.e, prune if:
        //
        //  (1) BB is an entry block or have no successors.
        //  (2) There's no path coming back through BB successors.
        if (BB == &BB->getParent()->getEntryBlock() ||
            !BB->getTerminator()->getNumSuccessors())
          return true;

        SmallVector<BasicBlock*, 32> Worklist;
        Worklist.append(succ_begin(BB), succ_end(BB));
        return !isPotentiallyReachableFromMany(Worklist, BB, DT);
      }

      // If the value is defined in the same basic block as use and BeforeHere,
      // there is no need to explore the use if BeforeHere dominates use.
      // Check whether there is a path from I to BeforeHere.
      if (BeforeHere != I && DT->dominates(BeforeHere, I) &&
          !isPotentiallyReachable(I, BeforeHere, DT))
        return true;

      return false;
    }

    bool shouldExplore(const Use *U) override {
      Instruction *I = cast<Instruction>(U->getUser());

      // A use at BeforeHere itself only matters when the caller asked for it.
      if (BeforeHere == I && !IncludeI)
        return false;

      if (isSafeToPrune(I))
        return false;

      return true;
    }

    bool captured(const Use *U) override {
      // Returns only count as captures when the caller said they should.
      if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
        return false;

      // Ignore capturing uses that provably cannot happen before BeforeHere.
      if (!shouldExplore(U))
        return false;

      Captured = true;
      return true;
    }

    OrderedBasicBlock *OrderedBB;  ///< Intra-block ordering cache (not owned).
    const Instruction *BeforeHere; ///< Only captures before this point count.
    const DominatorTree *DT;

    bool ReturnCaptures; ///< Treat returns as captures?
    bool IncludeI;       ///< Consider a use at BeforeHere itself?

    bool Captured; ///< Result: true once a qualifying capture is seen.
  };
     150             : }
     151             : 
     152             : /// PointerMayBeCaptured - Return true if this pointer value may be captured
     153             : /// by the enclosing function (which is required to exist).  This routine can
     154             : /// be expensive, so consider caching the results.  The boolean ReturnCaptures
     155             : /// specifies whether returning the value (or part of it) from the function
     156             : /// counts as capturing it or not.  The boolean StoreCaptures specified whether
     157             : /// storing the value (or part of it) into memory anywhere automatically
     158             : /// counts as capturing it or not.
     159      797094 : bool llvm::PointerMayBeCaptured(const Value *V,
     160             :                                 bool ReturnCaptures, bool StoreCaptures) {
     161             :   assert(!isa<GlobalValue>(V) &&
     162             :          "It doesn't make sense to ask whether a global is captured.");
     163             : 
     164             :   // TODO: If StoreCaptures is not true, we could do Fancy analysis
     165             :   // to determine whether this store is not actually an escape point.
     166             :   // In that case, BasicAliasAnalysis should be updated as well to
     167             :   // take advantage of this.
     168             :   (void)StoreCaptures;
     169             : 
     170             :   SimpleCaptureTracker SCT(ReturnCaptures);
     171      797094 :   PointerMayBeCaptured(V, &SCT);
     172     1594188 :   return SCT.Captured;
     173             : }
     174             : 
     175             : /// PointerMayBeCapturedBefore - Return true if this pointer value may be
     176             : /// captured by the enclosing function (which is required to exist). If a
     177             : /// DominatorTree is provided, only captures which happen before the given
     178             : /// instruction are considered. This routine can be expensive, so consider
     179             : /// caching the results.  The boolean ReturnCaptures specifies whether
     180             : /// returning the value (or part of it) from the function counts as capturing
     181             : /// it or not.  The boolean StoreCaptures specified whether storing the value
     182             : /// (or part of it) into memory anywhere automatically counts as capturing it
     183             : /// or not. A ordered basic block \p OBB can be used in order to speed up
     184             : /// queries about relative order among instructions in the same basic block.
     185      104473 : bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
     186             :                                       bool StoreCaptures, const Instruction *I,
     187             :                                       const DominatorTree *DT, bool IncludeI,
     188             :                                       OrderedBasicBlock *OBB) {
     189             :   assert(!isa<GlobalValue>(V) &&
     190             :          "It doesn't make sense to ask whether a global is captured.");
     191             :   bool UseNewOBB = OBB == nullptr;
     192             : 
     193      104473 :   if (!DT)
     194           0 :     return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures);
     195      104473 :   if (UseNewOBB)
     196       53009 :     OBB = new OrderedBasicBlock(I->getParent());
     197             : 
     198             :   // TODO: See comment in PointerMayBeCaptured regarding what could be done
     199             :   // with StoreCaptures.
     200             : 
     201             :   CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, OBB);
     202      104473 :   PointerMayBeCaptured(V, &CB);
     203             : 
     204      104473 :   if (UseNewOBB)
     205      106018 :     delete OBB;
     206      104473 :   return CB.Captured;
     207             : }
     208             : 
/// TODO: Write a new FunctionPass AliasAnalysis so that it can keep
/// a cache. Then we can move the code from BasicAliasAnalysis into
/// that path, and remove this threshold.
// Maximum number of uses examined per value before PointerMayBeCaptured gives
// up and calls tooManyUses(); also sizes its worklist/visited-set containers.
static int const Threshold = 20;
     213             : 
/// Walk the uses of \p V — and of values derived from it through casts, GEPs,
/// phis, selects and launder.invariant.group — reporting each potentially
/// capturing use to \p Tracker. Traversal stops as soon as Tracker->captured()
/// returns true, or degrades to Tracker->tooManyUses() once more than
/// Threshold uses of any single value are seen.
void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  SmallVector<const Use *, Threshold> Worklist;
  SmallSet<const Use *, Threshold> Visited;

  // Queue every not-yet-visited use of V that the tracker wants explored,
  // bailing out via tooManyUses() past the Threshold.
  auto AddUses = [&](const Value *V) {
    int Count = 0;
    for (const Use &U : V->uses()) {
      // If there are lots of uses, conservatively say that the value
      // is captured to avoid taking too much compile time.
      if (Count++ >= Threshold)
        return Tracker->tooManyUses();
      if (!Visited.insert(&U).second)
        continue;
      if (!Tracker->shouldExplore(&U))
        continue;
      Worklist.push_back(&U);
    }
  };
  AddUses(V);

  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    Instruction *I = cast<Instruction>(U->getUser());
    // V now names the operand of I through which the original pointer flows.
    V = U->get();

    switch (I->getOpcode()) {
    case Instruction::Call:
    case Instruction::Invoke: {
      CallSite CS(I);
      // Not captured if the callee is readonly, doesn't return a copy through
      // its return value and doesn't unwind (a readonly function can leak bits
      // by throwing an exception or not depending on the input value).
      if (CS.onlyReadsMemory() && CS.doesNotThrow() && I->getType()->isVoidTy())
        break;

      // launder.invariant.group only captures pointer by returning it,
      // so the pointer wasn't captured if returned pointer is not captured.
      // Note that adding similar special cases for intrinsics requires handling
      // them in 'isEscapeSource' in BasicAA.
      if (CS.getIntrinsicID() == Intrinsic::launder_invariant_group) {
        AddUses(I);
        break;
      }

      // Volatile operations effectively capture the memory location that they
      // load and store to.
      if (auto *MI = dyn_cast<MemIntrinsic>(I))
        if (MI->isVolatile())
          if (Tracker->captured(U))
            return;

      // Not captured if only passed via 'nocapture' arguments.  Note that
      // calling a function pointer does not in itself cause the pointer to
      // be captured.  This is a subtle point considering that (for example)
      // the callee might return its own address.  It is analogous to saying
      // that loading a value from a pointer does not cause the pointer to be
      // captured, even though the loaded value might be the pointer itself
      // (think of self-referential objects).
      CallSite::data_operand_iterator B =
        CS.data_operands_begin(), E = CS.data_operands_end();
      for (CallSite::data_operand_iterator A = B; A != E; ++A)
        if (A->get() == V && !CS.doesNotCapture(A - B))
          // The parameter is not marked 'nocapture' - captured.
          if (Tracker->captured(U))
            return;
      break;
    }
    case Instruction::Load:
      // Volatile loads make the address observable.
      if (cast<LoadInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::VAArg:
      // "va-arg" from a pointer does not cause it to be captured.
      break;
    case Instruction::Store:
        // Stored the pointer - conservatively assume it may be captured.
        // Volatile stores make the address observable.
      if (V == I->getOperand(0) || cast<StoreInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::AtomicRMW: {
      // atomicrmw conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ARMWI = cast<AtomicRMWInst>(I);
      if (ARMWI->getValOperand() == V || ARMWI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::AtomicCmpXchg: {
      // cmpxchg conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ACXI = cast<AtomicCmpXchgInst>(I);
      if (ACXI->getCompareOperand() == V || ACXI->getNewValOperand() == V ||
          ACXI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::PHI:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      // The original value is not captured via this if the new value isn't.
      AddUses(I);
      break;
    case Instruction::ICmp: {
      // Don't count comparisons of a no-alias return value against null as
      // captures. This allows us to ignore comparisons of malloc results
      // with null, for example.
      if (ConstantPointerNull *CPN =
          dyn_cast<ConstantPointerNull>(I->getOperand(1)))
        if (CPN->getType()->getAddressSpace() == 0)
          if (isNoAliasCall(V->stripPointerCasts()))
            break;
      // Comparison against value stored in global variable. Given the pointer
      // does not escape, its value cannot be guessed and stored separately in a
      // global variable.
      unsigned OtherIndex = (I->getOperand(0) == V) ? 1 : 0;
      auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIndex));
      if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
        break;
      // Otherwise, be conservative. There are crazy ways to capture pointers
      // using comparisons.
      if (Tracker->captured(U))
        return;
      break;
    }
    default:
      // Something else - be conservative and say it is captured.
      if (Tracker->captured(U))
        return;
      break;
    }
  }

  // All uses examined.
}

Generated by: LCOV version 1.13