clang 7.0.0
ExprEngineCallAndReturn.cpp
//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines ExprEngine's support for calls and returns.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclCXX.h"
#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumOfDynamicDispatchPathSplits,
  "The # of times we split the path due to imprecise dynamic dispatch info");

STATISTIC(NumInlinedCalls,
  "The # of times we inlined a call");

STATISTIC(NumReachedInlineCountMax,
  "The # of times we reached inline count maximum");

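// Generate the entry node for a call that is about to be inlined: hop from the
// CallEnter program point onto the entry block of the callee's CFG, notify
// checkers that analysis of this function has begun, and put the resulting
// nodes on the work list.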
void ExprEngine::processCallEnter(NodeBuilderContext& BC, CallEnter CE,
                                  ExplodedNode *Pred) {
  // Get the entry block in the CFG of the callee.
  const StackFrameContext *calleeCtx = CE.getCalleeContext();
  PrettyStackTraceLocationContext CrashInfo(calleeCtx);
  const CFGBlock *Entry = CE.getEntry();

  // Validate the CFG.
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *Succ = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, Succ, calleeCtx);

  ProgramStateRef state = Pred->getState();

  // Construct a new node, notify checkers that analysis of the function has
  // begun, and add the resultant nodes to the worklist.
  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
  Node->addPredecessor(Pred, G);
  if (isNew) {
    ExplodedNodeSet DstBegin;
    processBeginOfFunction(BC, Node, DstBegin, Loc);
    Engine.enqueue(DstBegin);
  }
}

// Find the last statement on the path to the exploded node and the
// corresponding Block.
static std::pair<const Stmt*,
                 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
  const Stmt *S = nullptr;
  const CFGBlock *Blk = nullptr;
  const StackFrameContext *SF = Node->getStackFrame();

  // Back up through the ExplodedGraph until we reach a statement node in this
  // stack frame.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();

    if (PP.getStackFrame() == SF) {
      if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
        S = SP->getStmt();
        break;
      } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
        S = CEE->getCalleeContext()->getCallSite();
        if (S)
          break;

        // If there is no statement, this is an implicitly-generated call.
        // We'll walk backwards over it and then continue the loop to find
        // an actual statement.
        Optional<CallEnter> CE;
        do {
          Node = Node->getFirstPred();
          CE = Node->getLocationAs<CallEnter>();
        } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());

        // Continue searching the graph.
      } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
        Blk = BE->getSrc();
      }
    } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
      // If we reached the CallEnter for this function, it has no statements.
      if (CE->getCalleeContext() == SF)
        break;
    }

    if (Node->pred_empty())
      return std::make_pair(nullptr, nullptr);

    Node = *Node->pred_begin();
  }

  return std::make_pair(S, Blk);
}

/// Adjusts a return value when the called function's return type does not
/// match the caller's expression type. This can happen when a dynamic call
/// is devirtualized, and the overriding method has a covariant (more specific)
/// return type than the parent's method. For C++ objects, this means we need
/// to add base casts.
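/// For example (illustrative only):
/// \code
///   struct Base { virtual Base *clone(); };
///   struct Derived : Base { Derived *clone() override; };
/// \endcode
/// When a call whose static callee returns Base* is devirtualized to
/// Derived::clone(), the Derived* produced by the inlined body is adjusted
/// back to the Base* the caller's expression expects.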
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
                              StoreManager &StoreMgr) {
  // For now, the only adjustments we handle apply only to locations.
  if (!V.getAs<Loc>())
    return V;

  // If the types already match, don't do any unnecessary work.
  ExpectedTy = ExpectedTy.getCanonicalType();
  ActualTy = ActualTy.getCanonicalType();
  if (ExpectedTy == ActualTy)
    return V;

  // No adjustment is needed between Objective-C pointer types.
  if (ExpectedTy->isObjCObjectPointerType() &&
      ActualTy->isObjCObjectPointerType())
    return V;

  // C++ object pointers may need "derived-to-base" casts.
  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
  if (ExpectedClass && ActualClass) {
    CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
                       /*DetectVirtual=*/false);
    if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
        !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
      return StoreMgr.evalDerivedToBase(V, Paths.front());
    }
  }

  // Unfortunately, Objective-C does not enforce that overridden methods have
  // covariant return types, so we can't assert that that never happens.
  // Be safe and return UnknownVal().
  return UnknownVal();
}

void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext &BC,
                                           ExplodedNode *Pred,
                                           ExplodedNodeSet &Dst) {
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(Pred);
  if (!Blk || !LastSt) {
    Dst.Add(Pred);
    return;
  }

  // Here, we destroy the current location context. We use the current
  // function's entire body as a diagnostic statement, with which the program
  // point will be associated. However, we only want to use LastStmt as a
  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
  // is dead.
  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
  const LocationContext *LCtx = Pred->getLocationContext();
  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
             LCtx->getAnalysisDeclContext()->getBody(),
             ProgramPoint::PostStmtPurgeDeadSymbolsKind);
}
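
// Returns true if the callee whose body was actually analyzed (taken from the
// callee stack frame) is a different declaration than the one named statically
// at the call site, e.g. because a virtual call was devirtualized to an
// override.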
static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
    const StackFrameContext *calleeCtx) {
  const Decl *RuntimeCallee = calleeCtx->getDecl();
  const Decl *StaticDecl = Call->getDecl();
  assert(RuntimeCallee);
  if (!StaticDecl)
    return true;
  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
}

/// The call exit is simulated with a sequence of nodes, which occur between
/// CallExitBegin and CallExitEnd. The following operations occur between the
/// two program points:
/// 1. CallExitBegin (triggers the start of call exit sequence)
/// 2. Bind the return value
/// 3. Run Remove dead bindings to clean up the dead symbols from the callee.
/// 4. CallExitEnd (switch to the caller context)
/// 5. PostStmt<CallExpr>
void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
  // Step 1 CEBNode was generated before the call.
  PrettyStackTraceLocationContext CrashInfo(CEBNode->getLocationContext());
  const StackFrameContext *calleeCtx = CEBNode->getStackFrame();

  // The parent context might not be a stack frame, so make sure we
  // look up the first enclosing stack frame.
  const StackFrameContext *callerCtx =
    calleeCtx->getParent()->getStackFrame();

  const Stmt *CE = calleeCtx->getCallSite();
  ProgramStateRef state = CEBNode->getState();
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(CEBNode);

  // Generate a CallEvent /before/ cleaning the state, so that we can get the
  // correct value for 'this' (if necessary).
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);

  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.

  // If the callee returns an expression, bind its value to CallExpr.
  if (CE) {
    if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
      const LocationContext *LCtx = CEBNode->getLocationContext();
      SVal V = state->getSVal(RS, LCtx);

      // Ensure that the return type matches the type of the returned Expr.
      if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
        QualType ReturnedTy =
          CallEvent::getDeclaredResultType(calleeCtx->getDecl());
        if (!ReturnedTy.isNull()) {
          if (const Expr *Ex = dyn_cast<Expr>(CE)) {
            V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
                                  getStoreManager());
          }
        }
      }

      state = state->BindExpr(CE, callerCtx, V);
    }

    // Bind the constructed object value to CXXConstructExpr.
    if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
      loc::MemRegionVal This =
        svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
      SVal ThisV = state->getSVal(This);
      ThisV = state->getSVal(ThisV.castAs<Loc>());
      state = state->BindExpr(CCE, callerCtx, ThisV);
    }

    if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
      // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
      // while to reach the actual CXXNewExpr element from here, so keep the
      // region for later use.
      // Additionally cast the return value of the inlined operator new
      // (which is of type 'void *') to the correct object type.
      SVal AllocV = state->getSVal(CNE, callerCtx);
      AllocV = svalBuilder.evalCast(
          AllocV, CNE->getType(),
          getContext().getPointerType(getContext().VoidTy));

      state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
                                         AllocV);
    }
  }

  // Step 3: BindedRetNode -> CleanedNodes
  // If we can find a statement and a block in the inlined function, run remove
  // dead bindings before returning from the call. This is important to ensure
  // that we report the issues such as leaks in the stack contexts in which
  // they occurred.
  ExplodedNodeSet CleanedNodes;
  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
    static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
    PostStmt Loc(LastSt, calleeCtx, &retValBind);
    bool isNew;
    ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
    BindedRetNode->addPredecessor(CEBNode, G);
    if (!isNew)
      return;

    NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
    currBldrCtx = &Ctx;
    // Here, we call the Symbol Reaper with 0 statement and callee location
    // context, telling it to clean up everything in the callee's context
    // (and its children). We use the callee's function body as a diagnostic
    // statement, with which the program point will be associated.
    removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
               calleeCtx->getAnalysisDeclContext()->getBody(),
               ProgramPoint::PostStmtPurgeDeadSymbolsKind);
    currBldrCtx = nullptr;
  } else {
    CleanedNodes.Add(CEBNode);
  }

  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
                                 E = CleanedNodes.end(); I != E; ++I) {

    // Step 4: Generate the CallExit and leave the callee's context.
    // CleanedNodes -> CEENode
    CallExitEnd Loc(calleeCtx, callerCtx);
    bool isNew;
    ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();

    ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
    CEENode->addPredecessor(*I, G);
    if (!isNew)
      return;

    // Step 5: Perform the post-condition check of the CallExpr and enqueue the
    // result onto the work list.
    // CEENode -> Dst -> WorkList
    NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
    SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
                                                      &Ctx);
    SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());

    CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);

    ExplodedNodeSet DstPostCall;
    if (const CXXNewExpr *CNE = dyn_cast_or_null<CXXNewExpr>(CE)) {
      ExplodedNodeSet DstPostPostCallCallback;
      getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                                 CEENode, *UpdatedCall, *this,
                                                 /*WasInlined=*/true);
      for (auto I : DstPostPostCallCallback) {
        getCheckerManager().runCheckersForNewAllocator(
            CNE,
            *getObjectUnderConstruction(I->getState(), CNE,
                                        calleeCtx->getParent()),
            DstPostCall, I, *this,
            /*WasInlined=*/true);
      }
    } else {
      getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
                                                 *UpdatedCall, *this,
                                                 /*WasInlined=*/true);
    }
    ExplodedNodeSet Dst;
    if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
      getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
                                                        *this,
                                                        /*WasInlined=*/true);
    } else if (CE &&
               !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
                 AMgr.getAnalyzerOptions().mayInlineCXXAllocator())) {
      getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
                                                 *this, /*WasInlined=*/true);
    } else {
      Dst.insert(DstPostCall);
    }

    // Enqueue the next element in the block.
    for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
         PSI != PSE; ++PSI) {
      Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
                                    calleeCtx->getIndex()+1);
    }
  }
}

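// Walk up the location-context chain and determine whether the callee D is
// already on the analysis stack (directly or mutually recursive) and how many
// inlined frames are on it, not counting functions small enough to always be
// inlined.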
void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
                                    bool &IsRecursive, unsigned &StackDepth) {
  IsRecursive = false;
  StackDepth = 0;

  while (LCtx) {
    if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
      const Decl *DI = SFC->getDecl();

      // Mark recursive (and mutually recursive) functions and always count
      // them when measuring the stack depth.
      if (DI == D) {
        IsRecursive = true;
        ++StackDepth;
        LCtx = LCtx->getParent();
        continue;
      }

      // Do not count the small functions when determining the stack depth.
      AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
      const CFG *CalleeCFG = CalleeADC->getCFG();
      if (CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
        ++StackDepth;
    }
    LCtx = LCtx->getParent();
  }
}

// The GDM component containing the dynamic dispatch bifurcation info. When
// the exact type of the receiver is not known, we want to explore both paths -
// one on which we do inline it and the other one on which we don't. This is
// done to ensure we do not drop coverage.
// This is the map from the receiver region to a bool, specifying whether we
// consider this region's information precise or not along the given path.
namespace {
  enum DynamicDispatchMode {
    DynamicDispatchModeInlined = 1,
    DynamicDispatchModeConservative
  };
} // end anonymous namespace

REGISTER_TRAIT_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                                 CLANG_ENTO_PROGRAMSTATE_MAP(const MemRegion *,
                                                             unsigned))

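// Inline the given call: push a new stack frame for the callee, bind actual to
// formal arguments, and hand the resulting CallEnter node to the core engine's
// work list. The caller-side node is taken back from the builder, since
// evaluation continues inside the callee.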
bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
                            NodeBuilder &Bldr, ExplodedNode *Pred,
                            ProgramStateRef State) {
  assert(D);

  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  const LocationContext *ParentOfCallee = CallerSFC;
  if (Call.getKind() == CE_Block &&
      !cast<BlockCall>(Call).isConversionFromLambda()) {
    const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
    assert(BR && "If we have the block definition we should have its region");
    AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
    ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
                                                         cast<BlockDecl>(D),
                                                         BR);
  }

  // This may be NULL, but that's fine.
  const Expr *CallE = Call.getOriginExpr();

  // Construct a new stack frame for the callee.
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const StackFrameContext *CalleeSFC =
    CalleeADC->getStackFrame(ParentOfCallee, CallE,
                             currBldrCtx->getBlock(),
                             currStmtIdx);

  CallEnter Loc(CallE, CalleeSFC, CurLC);

  // Construct a new state which contains the mapping from actual to
  // formal arguments.
  State = State->enterStackFrame(Call, CalleeSFC);

  bool isNew;
  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
    N->addPredecessor(Pred, G);
    if (isNew)
      Engine.getWorkList()->enqueue(N);
  }

  // If we decided to inline the call, the successor has been manually
  // added onto the work list so remove it from the node builder.
  Bldr.takeNodes(Pred);

  NumInlinedCalls++;
  Engine.FunctionSummaries->bumpNumTimesInlined(D);

  // Mark the decl as visited.
  if (VisitedCallees)
    VisitedCallees->insert(D);

  return true;
}

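// If the state carries a ReplayWithoutInlining marker for this call site, a
// previous inlined evaluation of it was abandoned; drop the marker and return
// the updated state so the call is evaluated conservatively this time.
// Returns null when no replay was requested.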
static ProgramStateRef getInlineFailedState(ProgramStateRef State,
                                            const Stmt *CallE) {
  const void *ReplayState = State->get<ReplayWithoutInlining>();
  if (!ReplayState)
    return nullptr;

  assert(ReplayState == CallE && "Backtracked to the wrong call.");
  (void)CallE;

  return State->remove<ReplayWithoutInlining>();
}

void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
                               ExplodedNodeSet &dst) {
  // Perform the previsit of the CallExpr.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);

  // Get the call in its initial state. We use this as a template to perform
  // all the checks.
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> CallTemplate
    = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());

  // Evaluate the function call. We try each of the checkers
  // to see if they can evaluate the function call.
  ExplodedNodeSet dstCallEvaluated;
  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
       I != E; ++I) {
    evalCall(dstCallEvaluated, *I, *CallTemplate);
  }

  // Finally, perform the post-condition check of the CallExpr and store
  // the created nodes in 'Dst'.
  // Note that if the call was inlined, dstCallEvaluated will be empty.
  // The post-CallExpr check will occur in processCallExit.
  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
                                             *this);
}

void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
                          const CallEvent &Call) {
  // WARNING: At this time, the state attached to 'Call' may be older than the
  // state in 'Pred'. This is a minor optimization since CheckerManager will
  // use an updated CallEvent instance when calling checkers, but if 'Call' is
  // ever used directly in this function all callers should be updated to pass
  // the most recent state. (It is probably not worth doing the work here since
  // for some callers this will not be necessary.)

  // Run any pre-call checks using the generic call interface.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred, Call, *this);

  // Actually evaluate the function call. We try each of the checkers
  // to see if they can evaluate the function call, and get a callback at
  // defaultEvalCall if all of them fail.
  ExplodedNodeSet dstCallEvaluated;
  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
                                             Call, *this);

  // Finally, run any post-call checks.
  getCheckerManager().runCheckersForPostCall(Dst, dstCallEvaluated,
                                             Call, *this);
}

ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
                                            const LocationContext *LCtx,
                                            ProgramStateRef State) {
  const Expr *E = Call.getOriginExpr();
  if (!E)
    return State;

  // Some method families have known return values.
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
    switch (Msg->getMethodFamily()) {
    default:
      break;
    case OMF_autorelease:
    case OMF_retain:
    case OMF_self: {
      // These methods return their receivers.
      return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
    }
    }
  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
    SVal ThisV = C->getCXXThisVal();
    ThisV = State->getSVal(ThisV.castAs<Loc>());
    return State->BindExpr(E, LCtx, ThisV);
  }

  SVal R;
  QualType ResultTy = Call.getResultType();
  unsigned Count = currBldrCtx->blockCount();
  if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
    // Conjure a temporary if the function returns an object by value.
    SVal Target;
    assert(RTC->getStmt() == Call.getOriginExpr());
    EvalCallOptions CallOpts; // FIXME: We won't really need those.
    std::tie(State, Target) =
        prepareForObjectConstruction(Call.getOriginExpr(), State, LCtx,
                                     RTC->getConstructionContext(), CallOpts);
    assert(Target.getAsRegion());
    // Invalidate the region so that it doesn't look uninitialized. Don't
    // notify the checkers.
    State = State->invalidateRegions(Target.getAsRegion(), E, Count, LCtx,
                                     /* CausedByPointerEscape=*/false, nullptr,
                                     &Call, nullptr);

    R = State->getSVal(Target.castAs<Loc>(), E->getType());
  } else {
    // Conjure a symbol if the return value is unknown.

    // See if we need to conjure a heap pointer instead of
    // a regular unknown pointer.
    bool IsHeapPointer = false;
    if (const auto *CNE = dyn_cast<CXXNewExpr>(E))
      if (CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
        // FIXME: Delegate this to evalCall in MallocChecker?
        IsHeapPointer = true;
      }

    R = IsHeapPointer ? svalBuilder.getConjuredHeapSymbolVal(E, LCtx, Count)
                      : svalBuilder.conjureSymbolVal(nullptr, E, LCtx, ResultTy,
                                                     Count);
  }
  return State->BindExpr(E, LCtx, R);
}

// Conservatively evaluate call by invalidating regions and binding
// a conjured return value.
void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
                                      ExplodedNode *Pred,
                                      ProgramStateRef State) {
  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
  State = bindReturnValue(Call, Pred->getLocationContext(), State);

  // And make the result node.
  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
}

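// Decide, per call kind and the active analyzer options, whether this call may
// be inlined: CIP_Allowed, CIP_DisallowedOnce (skip just this call), or
// CIP_DisallowedAlways (never inline this callee).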
ExprEngine::CallInlinePolicy
ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
                              AnalyzerOptions &Opts,
                              const ExprEngine::EvalCallOptions &CallOpts) {
  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  switch (Call.getKind()) {
  case CE_Function:
  case CE_Block:
    break;
  case CE_CXXMember:
  case CE_CXXMemberOperator:
    if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
      return CIP_DisallowedAlways;
    break;
  case CE_CXXConstructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
      return CIP_DisallowedAlways;

    const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);

    const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();

    auto CCE = getCurrentCFGElement().getAs<CFGConstructor>();
    const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
                                        : nullptr;

    if (CC && isa<NewAllocatedObjectConstructionContext>(CC) &&
        !Opts.mayInlineCXXAllocator())
      return CIP_DisallowedOnce;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    // Even once we do, we still need to be careful about implicitly-generated
    // initializers for array fields in default move/copy constructors.
    // We still allow construction into ElementRegion targets when they don't
    // represent array elements.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Inlining constructors requires including initializers in the CFG.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
    (void)ADC;

    // If the destructor is trivial, it's always safe to inline the constructor.
    if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
      break;

    // For other types, only inline constructors if destructor inlining is
    // also enabled.
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete) {
      // If we don't handle temporary destructors, we shouldn't inline
      // their constructors.
      if (CallOpts.IsTemporaryCtorOrDtor &&
          !Opts.includeTemporaryDtorsInCFG())
        return CIP_DisallowedOnce;

      // If we did not find the correct this-region, it would be pointless
      // to inline the constructor. Instead we will simply invalidate
      // the fake temporary target.
      if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return CIP_DisallowedOnce;

      // If the temporary is lifetime-extended by binding it to a reference-type
      // field within an aggregate, automatic destructors don't work properly.
      if (CallOpts.IsTemporaryLifetimeExtendedViaAggregate)
        return CIP_DisallowedOnce;
    }

    break;
  }
  case CE_CXXDestructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    // Inlining destructors requires building the CFG correctly.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
    (void)ADC;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Allow disabling temporary destructor inlining with a separate option.
    if (CallOpts.IsTemporaryCtorOrDtor && !Opts.mayInlineCXXTemporaryDtors())
      return CIP_DisallowedOnce;

    // If we did not find the correct this-region, it would be pointless
    // to inline the destructor. Instead we will simply invalidate
    // the fake temporary target.
    if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
      return CIP_DisallowedOnce;
    break;
  }
  case CE_CXXAllocator:
    if (Opts.mayInlineCXXAllocator())
      break;
    // Do not inline allocators until we model deallocators.
    // This is unfortunate, but basically necessary for smart pointers and such.
    return CIP_DisallowedAlways;
  case CE_ObjCMessage:
    if (!Opts.mayInlineObjCMethod())
      return CIP_DisallowedAlways;
    if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
          Opts.getIPAMode() == IPAK_DynamicDispatchBifurcate))
      return CIP_DisallowedAlways;
    break;
  }

  return CIP_Allowed;
}

/// Returns true if the given C++ class contains a member with the given name.
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
                      StringRef Name) {
  const IdentifierInfo &II = Ctx.Idents.get(Name);
  DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
  if (!RD->lookup(DeclName).empty())
    return true;

  CXXBasePaths Paths(false, false, false);
  if (RD->lookupInBases(
          [DeclName](const CXXBaseSpecifier *Specifier, CXXBasePath &Path) {
            return CXXRecordDecl::FindOrdinaryMember(Specifier, Path, DeclName);
          },
          Paths))
    return true;

  return false;
}

/// Returns true if the given C++ class is a container or iterator.
///
/// Our heuristic for this is whether it contains a method named 'begin()' or a
/// nested type named 'iterator' or 'iterator_category'.
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
  return hasMember(Ctx, RD, "begin") ||
         hasMember(Ctx, RD, "iterator") ||
         hasMember(Ctx, RD, "iterator_category");
}
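// For example (illustrative only), the heuristic above matches a class such as
//   struct MyVec { using iterator = int *; iterator begin(); };
// so its methods are skipped when inlining of container methods is disabled in
// the analyzer options.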

/// Returns true if the given function refers to a method of a C++ container
/// or iterator.
///
/// We generally do a poor job modeling most containers right now, and might
/// prefer not to inline their methods.
static bool isContainerMethod(const ASTContext &Ctx,
                              const FunctionDecl *FD) {
  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
    return isContainerClass(Ctx, MD->getParent());
  return false;
}

/// Returns true if the given function is the destructor of a class named
/// "shared_ptr".
static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
  const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
  if (!Dtor)
    return false;

  const CXXRecordDecl *RD = Dtor->getParent();
  if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
    if (II->isStr("shared_ptr"))
      return true;

  return false;
}

/// Returns true if the function in \p CalleeADC may be inlined in general.
///
/// This checks static properties of the function, such as its signature and
/// CFG, to determine whether the analyzer should ever consider inlining it,
/// in any context.
static bool mayInlineDecl(AnalysisManager &AMgr,
                          AnalysisDeclContext *CalleeADC) {
  AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
  // FIXME: Do not inline variadic calls.
  if (CallEvent::isVariadic(CalleeADC->getDecl()))
    return false;

  // Check certain C++-related inlining policies.
  ASTContext &Ctx = CalleeADC->getASTContext();
  if (Ctx.getLangOpts().CPlusPlus) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
      // Conditionally control the inlining of template functions.
      if (!Opts.mayInlineTemplateFunctions())
        if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
          return false;

      // Conditionally control the inlining of C++ standard library functions.
      if (!Opts.mayInlineCXXStandardLibrary())
        if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
          if (AnalysisDeclContext::isInStdNamespace(FD))
            return false;

      // Conditionally control the inlining of methods on objects that look
      // like C++ containers.
      if (!Opts.mayInlineCXXContainerMethods())
        if (!AMgr.isInCodeFile(FD->getLocation()))
          if (isContainerMethod(Ctx, FD))
            return false;

      // Conditionally control the inlining of the destructor of C++ shared_ptr.
      // We don't currently do a good job modeling shared_ptr because we can't
      // see the reference count, so treating it as opaque is probably the best
      // idea.
      if (!Opts.mayInlineCXXSharedPtrDtor())
        if (isCXXSharedPtrDtor(FD))
          return false;
    }
  }

  // It is possible that the CFG cannot be constructed.
  // Be safe, and check if the CalleeCFG is valid.
  const CFG *CalleeCFG = CalleeADC->getCFG();
  if (!CalleeCFG)
    return false;

  // Do not inline large functions.
  if (CalleeCFG->getNumBlockIDs() > Opts.getMaxInlinableSize())
    return false;

  // It is possible that the live variables analysis cannot be
  // run. If so, bail out.
  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
    return false;

  return true;
}

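// Combine the cached per-declaration check (mayInlineDecl), the per-call-kind
// policy (mayInlineCallKind), and dynamic budgets such as inlining stack depth
// and how many times a large callee has already been inlined, to decide
// whether this particular call should be inlined now.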
bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
                                  const ExplodedNode *Pred,
                                  const EvalCallOptions &CallOpts) {
  if (!D)
    return false;

  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);

  // The auto-synthesized bodies are essential to inline as they are
  // usually small and commonly used. Note: we should do this check early on to
  // ensure we always inline these calls.
  if (CalleeADC->isBodyAutosynthesized())
    return true;

  if (!AMgr.shouldInlineCall())
    return false;

  // Check if this function has been marked as non-inlinable.
  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
  if (MayInline.hasValue()) {
    if (!MayInline.getValue())
      return false;

  } else {
    // We haven't actually checked the static properties of this function yet.
    // Do that now, and record our decision in the function summaries.
    if (mayInlineDecl(getAnalysisManager(), CalleeADC)) {
      Engine.FunctionSummaries->markMayInline(D);
    } else {
      Engine.FunctionSummaries->markShouldNotInline(D);
      return false;
    }
  }

  // Check if we should inline a call based on its kind.
  // FIXME: this checks both static and dynamic properties of the call, which
  // means we're redoing a bit of work that could be cached in the function
  // summary.
  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
  if (CIP != CIP_Allowed) {
    if (CIP == CIP_DisallowedAlways) {
      assert(!MayInline.hasValue() || MayInline.getValue());
      Engine.FunctionSummaries->markShouldNotInline(D);
    }
    return false;
  }

  const CFG *CalleeCFG = CalleeADC->getCFG();

  // Do not inline if recursive or we've reached max stack frame count.
  bool IsRecursive = false;
  unsigned StackDepth = 0;
  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
      ((CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize())
        || IsRecursive))
    return false;

  // Do not inline large functions too many times.
  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
       Opts.getMaxTimesInlineLarge()) &&
      CalleeCFG->getNumBlockIDs() >=
      Opts.getMinCFGSizeTreatFunctionsAsLarge()) {
    NumReachedInlineCountMax++;
    return false;
  }

  if (HowToInline == Inline_Minimal &&
      (CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize()
      || IsRecursive))
    return false;

  return true;
}

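// Returns true if this call is a trivial copy or move assignment operator,
// which defaultEvalCall handles via performTrivialCopy instead of inlining.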
static bool isTrivialObjectAssignment(const CallEvent &Call) {
  const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
  if (!ICall)
    return false;

  const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
  if (!MD)
    return false;
  if (!(MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()))
    return false;

  return MD->isTrivial();
}

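// Fallback evaluation for calls that no checker evaluated: either inline the
// callee, bifurcate the path for imprecise dynamic dispatch, or fall back to
// conservative invalidation with a conjured return value.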
void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
                                 const CallEvent &CallTemplate,
                                 const EvalCallOptions &CallOpts) {
  // Make sure we have the most recent state attached to the call.
  ProgramStateRef State = Pred->getState();
  CallEventRef<> Call = CallTemplate.cloneWithState(State);

  // Special-case trivial assignment operators.
  if (isTrivialObjectAssignment(*Call)) {
    performTrivialCopy(Bldr, Pred, *Call);
    return;
  }

  // Try to inline the call.
  // The origin expression here is just used as a kind of checksum;
  // this should still be safe even for CallEvents that don't come from exprs.
  const Expr *E = Call->getOriginExpr();

  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
  if (InlinedFailedState) {
    // If we already tried once and failed, make sure we don't retry later.
    State = InlinedFailedState;
  } else {
    RuntimeDefinition RD = Call->getRuntimeDefinition();
    const Decl *D = RD.getDecl();
    if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
      if (RD.mayHaveOtherDefinitions()) {
        AnalyzerOptions &Options = getAnalysisManager().options;

        // Explore with and without inlining the call.
        if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
          BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
          return;
        }

        // Don't inline if we're not in any dynamic dispatch mode.
        if (Options.getIPAMode() != IPAK_DynamicDispatch) {
          conservativeEvalCall(*Call, Bldr, Pred, State);
          return;
        }
      }

      // We are not bifurcating and we do have a Decl, so just inline.
      if (inlineCall(*Call, D, Bldr, Pred, State))
        return;
    }
  }

  // If we can't inline it, handle the return value and invalidate the regions.
  conservativeEvalCall(*Call, Bldr, Pred, State);
}

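// When the runtime definition of a dynamically dispatched call is imprecise,
// split the path once per receiver region: one branch inlines the candidate
// definition, the other evaluates the call conservatively, so neither
// assumption costs us coverage.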
void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
                               const CallEvent &Call, const Decl *D,
                               NodeBuilder &Bldr, ExplodedNode *Pred) {
  assert(BifurReg);
  BifurReg = BifurReg->StripCasts();

  // Check if we've performed the split already - note, we only want
  // to split the path once per memory region.
  ProgramStateRef State = Pred->getState();
  const unsigned *BState =
                        State->get<DynamicDispatchBifurcationMap>(BifurReg);
  if (BState) {
    // If we are on the "inline path", keep inlining if possible.
    if (*BState == DynamicDispatchModeInlined)
      if (inlineCall(Call, D, Bldr, Pred, State))
        return;
    // If inlining failed, or we are on the path where we assume we
    // don't have enough info about the receiver to inline, conjure the
    // return value and invalidate the regions.
    conservativeEvalCall(Call, Bldr, Pred, State);
    return;
  }

  // If we got here, this is the first time we process a message to this
  // region, so split the path.
  ProgramStateRef IState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                                DynamicDispatchModeInlined);
  inlineCall(Call, D, Bldr, Pred, IState);

  ProgramStateRef NoIState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                                DynamicDispatchModeConservative);
  conservativeEvalCall(Call, Bldr, Pred, NoIState);

  NumOfDynamicDispatchPathSplits++;
}

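// Transfer function for return statements: run the pre-statement checkers and
// generate a node for the return value, if any. The actual binding of the
// value to the call site happens later, in processCallExit().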
void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);

  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);

  if (RS->getRetValue()) {
    for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
                                   ei = dstPreVisit.end(); it != ei; ++it) {
      B.generateNode(RS, *it, (*it)->getState());
    }
  }
}