clang  9.0.0
ExprEngineCallAndReturn.cpp
Go to the documentation of this file.
1 //=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file defines ExprEngine's support for calls and returns.
10 //
11 //===----------------------------------------------------------------------===//
12 
16 #include "clang/AST/DeclCXX.h"
21 #include "llvm/ADT/SmallSet.h"
22 #include "llvm/ADT/Statistic.h"
23 #include "llvm/Support/SaveAndRestore.h"
24 
25 using namespace clang;
26 using namespace ento;
27 
#define DEBUG_TYPE "ExprEngine"

// Analyzer statistics (reported via -stats); the counters are bumped by the
// call-inlining machinery in this file.
STATISTIC(NumOfDynamicDispatchPathSplits,
  "The # of times we split the path due to imprecise dynamic dispatch info");

STATISTIC(NumInlinedCalls,
  "The # of times we inlined a call");

STATISTIC(NumReachedInlineCountMax,
  "The # of times we reached inline count maximum");
38 
40  ExplodedNode *Pred) {
41  // Get the entry block in the CFG of the callee.
42  const StackFrameContext *calleeCtx = CE.getCalleeContext();
43  PrettyStackTraceLocationContext CrashInfo(calleeCtx);
44  const CFGBlock *Entry = CE.getEntry();
45 
46  // Validate the CFG.
47  assert(Entry->empty());
48  assert(Entry->succ_size() == 1);
49 
50  // Get the solitary successor.
51  const CFGBlock *Succ = *(Entry->succ_begin());
52 
53  // Construct an edge representing the starting location in the callee.
54  BlockEdge Loc(Entry, Succ, calleeCtx);
55 
56  ProgramStateRef state = Pred->getState();
57 
58  // Construct a new node, notify checkers that analysis of the function has
59  // begun, and add the resultant nodes to the worklist.
60  bool isNew;
61  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
62  Node->addPredecessor(Pred, G);
63  if (isNew) {
64  ExplodedNodeSet DstBegin;
65  processBeginOfFunction(BC, Node, DstBegin, Loc);
66  Engine.enqueue(DstBegin);
67  }
68 }
69 
70 // Find the last statement on the path to the exploded node and the
71 // corresponding Block.
72 static std::pair<const Stmt*,
73  const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
74  const Stmt *S = nullptr;
75  const CFGBlock *Blk = nullptr;
76  const StackFrameContext *SF = Node->getStackFrame();
77 
78  // Back up through the ExplodedGraph until we reach a statement node in this
79  // stack frame.
80  while (Node) {
81  const ProgramPoint &PP = Node->getLocation();
82 
83  if (PP.getStackFrame() == SF) {
84  if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
85  S = SP->getStmt();
86  break;
87  } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
88  S = CEE->getCalleeContext()->getCallSite();
89  if (S)
90  break;
91 
92  // If there is no statement, this is an implicitly-generated call.
93  // We'll walk backwards over it and then continue the loop to find
94  // an actual statement.
96  do {
97  Node = Node->getFirstPred();
98  CE = Node->getLocationAs<CallEnter>();
99  } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());
100 
101  // Continue searching the graph.
102  } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
103  Blk = BE->getSrc();
104  }
105  } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
106  // If we reached the CallEnter for this function, it has no statements.
107  if (CE->getCalleeContext() == SF)
108  break;
109  }
110 
111  if (Node->pred_empty())
112  return std::make_pair(nullptr, nullptr);
113 
114  Node = *Node->pred_begin();
115  }
116 
117  return std::make_pair(S, Blk);
118 }
119 
120 /// Adjusts a return value when the called function's return type does not
121 /// match the caller's expression type. This can happen when a dynamic call
122 /// is devirtualized, and the overriding method has a covariant (more specific)
123 /// return type than the parent's method. For C++ objects, this means we need
124 /// to add base casts.
125 static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
126  StoreManager &StoreMgr) {
127  // For now, the only adjustments we handle apply only to locations.
128  if (!V.getAs<Loc>())
129  return V;
130 
131  // If the types already match, don't do any unnecessary work.
132  ExpectedTy = ExpectedTy.getCanonicalType();
133  ActualTy = ActualTy.getCanonicalType();
134  if (ExpectedTy == ActualTy)
135  return V;
136 
137  // No adjustment is needed between Objective-C pointer types.
138  if (ExpectedTy->isObjCObjectPointerType() &&
139  ActualTy->isObjCObjectPointerType())
140  return V;
141 
142  // C++ object pointers may need "derived-to-base" casts.
143  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
144  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
145  if (ExpectedClass && ActualClass) {
146  CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
147  /*DetectVirtual=*/false);
148  if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
149  !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
150  return StoreMgr.evalDerivedToBase(V, Paths.front());
151  }
152  }
153 
154  // Unfortunately, Objective-C does not enforce that overridden methods have
155  // covariant return types, so we can't assert that that never happens.
156  // Be safe and return UnknownVal().
157  return UnknownVal();
158 }
159 
161  ExplodedNode *Pred,
162  ExplodedNodeSet &Dst) {
163  // Find the last statement in the function and the corresponding basic block.
164  const Stmt *LastSt = nullptr;
165  const CFGBlock *Blk = nullptr;
166  std::tie(LastSt, Blk) = getLastStmt(Pred);
167  if (!Blk || !LastSt) {
168  Dst.Add(Pred);
169  return;
170  }
171 
172  // Here, we destroy the current location context. We use the current
173  // function's entire body as a diagnostic statement, with which the program
174  // point will be associated. However, we only want to use LastStmt as a
175  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
176  // is dead.
177  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
178  const LocationContext *LCtx = Pred->getLocationContext();
179  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
180  LCtx->getAnalysisDeclContext()->getBody(),
182 }
183 
184 static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
185  const StackFrameContext *calleeCtx) {
186  const Decl *RuntimeCallee = calleeCtx->getDecl();
187  const Decl *StaticDecl = Call->getDecl();
188  assert(RuntimeCallee);
189  if (!StaticDecl)
190  return true;
191  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
192 }
193 
194 /// The call exit is simulated with a sequence of nodes, which occur between
195 /// CallExitBegin and CallExitEnd. The following operations occur between the
196 /// two program points:
197 /// 1. CallExitBegin (triggers the start of call exit sequence)
198 /// 2. Bind the return value
199 /// 3. Run Remove dead bindings to clean up the dead symbols from the callee.
200 /// 4. CallExitEnd (switch to the caller context)
201 /// 5. PostStmt<CallExpr>
203  // Step 1 CEBNode was generated before the call.
205  const StackFrameContext *calleeCtx = CEBNode->getStackFrame();
206 
207  // The parent context might not be a stack frame, so make sure we
208  // look up the first enclosing stack frame.
209  const StackFrameContext *callerCtx =
210  calleeCtx->getParent()->getStackFrame();
211 
212  const Stmt *CE = calleeCtx->getCallSite();
213  ProgramStateRef state = CEBNode->getState();
214  // Find the last statement in the function and the corresponding basic block.
215  const Stmt *LastSt = nullptr;
216  const CFGBlock *Blk = nullptr;
217  std::tie(LastSt, Blk) = getLastStmt(CEBNode);
218 
219  // Generate a CallEvent /before/ cleaning the state, so that we can get the
220  // correct value for 'this' (if necessary).
222  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);
223 
224  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.
225 
226  // If the callee returns an expression, bind its value to CallExpr.
227  if (CE) {
228  if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
229  const LocationContext *LCtx = CEBNode->getLocationContext();
230  SVal V = state->getSVal(RS, LCtx);
231 
232  // Ensure that the return type matches the type of the returned Expr.
233  if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
234  QualType ReturnedTy =
235  CallEvent::getDeclaredResultType(calleeCtx->getDecl());
236  if (!ReturnedTy.isNull()) {
237  if (const Expr *Ex = dyn_cast<Expr>(CE)) {
238  V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
239  getStoreManager());
240  }
241  }
242  }
243 
244  state = state->BindExpr(CE, callerCtx, V);
245  }
246 
247  // Bind the constructed object value to CXXConstructExpr.
248  if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
249  loc::MemRegionVal This =
250  svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
251  SVal ThisV = state->getSVal(This);
252  ThisV = state->getSVal(ThisV.castAs<Loc>());
253  state = state->BindExpr(CCE, callerCtx, ThisV);
254  }
255 
256  if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
257  // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
258  // while to reach the actual CXXNewExpr element from here, so keep the
259  // region for later use.
260  // Additionally cast the return value of the inlined operator new
261  // (which is of type 'void *') to the correct object type.
262  SVal AllocV = state->getSVal(CNE, callerCtx);
263  AllocV = svalBuilder.evalCast(
264  AllocV, CNE->getType(),
265  getContext().getPointerType(getContext().VoidTy));
266 
267  state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
268  AllocV);
269  }
270  }
271 
272  // Step 3: BindedRetNode -> CleanedNodes
273  // If we can find a statement and a block in the inlined function, run remove
274  // dead bindings before returning from the call. This is important to ensure
275  // that we report the issues such as leaks in the stack contexts in which
276  // they occurred.
277  ExplodedNodeSet CleanedNodes;
278  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
279  static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
280  PostStmt Loc(LastSt, calleeCtx, &retValBind);
281  bool isNew;
282  ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
283  BindedRetNode->addPredecessor(CEBNode, G);
284  if (!isNew)
285  return;
286 
287  NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
288  currBldrCtx = &Ctx;
289  // Here, we call the Symbol Reaper with 0 statement and callee location
290  // context, telling it to clean up everything in the callee's context
291  // (and its children). We use the callee's function body as a diagnostic
292  // statement, with which the program point will be associated.
293  removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
294  calleeCtx->getAnalysisDeclContext()->getBody(),
296  currBldrCtx = nullptr;
297  } else {
298  CleanedNodes.Add(CEBNode);
299  }
300 
301  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
302  E = CleanedNodes.end(); I != E; ++I) {
303 
304  // Step 4: Generate the CallExit and leave the callee's context.
305  // CleanedNodes -> CEENode
306  CallExitEnd Loc(calleeCtx, callerCtx);
307  bool isNew;
308  ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();
309 
310  ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
311  CEENode->addPredecessor(*I, G);
312  if (!isNew)
313  return;
314 
315  // Step 5: Perform the post-condition check of the CallExpr and enqueue the
316  // result onto the work list.
317  // CEENode -> Dst -> WorkList
318  NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
319  SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
320  &Ctx);
321  SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());
322 
323  CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);
324 
325  ExplodedNodeSet DstPostCall;
326  if (const CXXNewExpr *CNE = dyn_cast_or_null<CXXNewExpr>(CE)) {
327  ExplodedNodeSet DstPostPostCallCallback;
328  getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
329  CEENode, *UpdatedCall, *this,
330  /*wasInlined=*/true);
331  for (auto I : DstPostPostCallCallback) {
333  CNE,
334  *getObjectUnderConstruction(I->getState(), CNE,
335  calleeCtx->getParent()),
336  DstPostCall, I, *this,
337  /*wasInlined=*/true);
338  }
339  } else {
340  getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
341  *UpdatedCall, *this,
342  /*wasInlined=*/true);
343  }
344  ExplodedNodeSet Dst;
345  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
346  getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
347  *this,
348  /*wasInlined=*/true);
349  } else if (CE &&
350  !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
351  AMgr.getAnalyzerOptions().MayInlineCXXAllocator)) {
352  getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
353  *this, /*wasInlined=*/true);
354  } else {
355  Dst.insert(DstPostCall);
356  }
357 
358  // Enqueue the next element in the block.
359  for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
360  PSI != PSE; ++PSI) {
361  Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
362  calleeCtx->getIndex()+1);
363  }
364  }
365 }
366 
367 bool ExprEngine::isSmall(AnalysisDeclContext *ADC) const {
368  // When there are no branches in the function, it means that there's no
369  // exponential complexity introduced by inlining such function.
370  // Such functions also don't trigger various fundamental problems
371  // with our inlining mechanism, such as the problem of
372  // inlined defensive checks. Hence isLinear().
373  const CFG *Cfg = ADC->getCFG();
374  return Cfg->isLinear() || Cfg->size() <= AMgr.options.AlwaysInlineSize;
375 }
376 
377 bool ExprEngine::isLarge(AnalysisDeclContext *ADC) const {
378  const CFG *Cfg = ADC->getCFG();
379  return Cfg->size() >= AMgr.options.MinCFGSizeTreatFunctionsAsLarge;
380 }
381 
382 bool ExprEngine::isHuge(AnalysisDeclContext *ADC) const {
383  const CFG *Cfg = ADC->getCFG();
384  return Cfg->getNumBlockIDs() > AMgr.options.MaxInlinableSize;
385 }
386 
387 void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
388  bool &IsRecursive, unsigned &StackDepth) {
389  IsRecursive = false;
390  StackDepth = 0;
391 
392  while (LCtx) {
393  if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
394  const Decl *DI = SFC->getDecl();
395 
396  // Mark recursive (and mutually recursive) functions and always count
397  // them when measuring the stack depth.
398  if (DI == D) {
399  IsRecursive = true;
400  ++StackDepth;
401  LCtx = LCtx->getParent();
402  continue;
403  }
404 
405  // Do not count the small functions when determining the stack depth.
406  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
407  if (!isSmall(CalleeADC))
408  ++StackDepth;
409  }
410  LCtx = LCtx->getParent();
411  }
412 }
413 
414 // The GDM component containing the dynamic dispatch bifurcation info. When
415 // the exact type of the receiver is not known, we want to explore both paths -
416 // one on which we do inline it and the other one on which we don't. This is
417 // done to ensure we do not drop coverage.
418 // This is the map from the receiver region to a bool, specifying either we
419 // consider this region's information precise or not along the given path.
420 namespace {
422  DynamicDispatchModeInlined = 1,
423  DynamicDispatchModeConservative
424  };
425 } // end anonymous namespace
426 
427 REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
428  const MemRegion *, unsigned)
429 
430 bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
431  NodeBuilder &Bldr, ExplodedNode *Pred,
433  assert(D);
434 
435  const LocationContext *CurLC = Pred->getLocationContext();
436  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
437  const LocationContext *ParentOfCallee = CallerSFC;
438  if (Call.getKind() == CE_Block &&
439  !cast<BlockCall>(Call).isConversionFromLambda()) {
440  const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
441  assert(BR && "If we have the block definition we should have its region");
442  AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
443  ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
444  cast<BlockDecl>(D),
445  BR);
446  }
447 
448  // This may be NULL, but that's fine.
449  const Expr *CallE = Call.getOriginExpr();
450 
451  // Construct a new stack frame for the callee.
452  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
453  const StackFrameContext *CalleeSFC =
454  CalleeADC->getStackFrame(ParentOfCallee, CallE,
455  currBldrCtx->getBlock(),
456  currStmtIdx);
457 
458  CallEnter Loc(CallE, CalleeSFC, CurLC);
459 
460  // Construct a new state which contains the mapping from actual to
461  // formal arguments.
462  State = State->enterStackFrame(Call, CalleeSFC);
463 
464  bool isNew;
465  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
466  N->addPredecessor(Pred, G);
467  if (isNew)
468  Engine.getWorkList()->enqueue(N);
469  }
470 
471  // If we decided to inline the call, the successor has been manually
472  // added onto the work list so remove it from the node builder.
473  Bldr.takeNodes(Pred);
474 
475  NumInlinedCalls++;
476  Engine.FunctionSummaries->bumpNumTimesInlined(D);
477 
478  // Mark the decl as visited.
479  if (VisitedCallees)
480  VisitedCallees->insert(D);
481 
482  return true;
483 }
484 
486  const Stmt *CallE) {
487  const void *ReplayState = State->get<ReplayWithoutInlining>();
488  if (!ReplayState)
489  return nullptr;
490 
491  assert(ReplayState == CallE && "Backtracked to the wrong call.");
492  (void)CallE;
493 
494  return State->remove<ReplayWithoutInlining>();
495 }
496 
498  ExplodedNodeSet &dst) {
499  // Perform the previsit of the CallExpr.
500  ExplodedNodeSet dstPreVisit;
501  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
502 
503  // Get the call in its initial state. We use this as a template to perform
504  // all the checks.
506  CallEventRef<> CallTemplate
507  = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());
508 
509  // Evaluate the function call. We try each of the checkers
510  // to see if the can evaluate the function call.
511  ExplodedNodeSet dstCallEvaluated;
512  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
513  I != E; ++I) {
514  evalCall(dstCallEvaluated, *I, *CallTemplate);
515  }
516 
517  // Finally, perform the post-condition check of the CallExpr and store
518  // the created nodes in 'Dst'.
519  // Note that if the call was inlined, dstCallEvaluated will be empty.
520  // The post-CallExpr check will occur in processCallExit.
521  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
522  *this);
523 }
524 
525 ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
526  const CallEvent &Call) {
527  const Expr *E = Call.getOriginExpr();
528  // FIXME: Constructors to placement arguments of operator new
529  // are not supported yet.
530  if (!E || isa<CXXNewExpr>(E))
531  return State;
532 
533  const LocationContext *LC = Call.getLocationContext();
534  for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
535  unsigned I = Call.getASTArgumentIndex(CallI);
536  if (Optional<SVal> V =
537  getObjectUnderConstruction(State, {E, I}, LC)) {
538  SVal VV = *V;
539  (void)VV;
540  assert(cast<VarRegion>(VV.castAs<loc::MemRegionVal>().getRegion())
541  ->getStackFrame()->getParent()
542  ->getStackFrame() == LC->getStackFrame());
543  State = finishObjectConstruction(State, {E, I}, LC);
544  }
545  }
546 
547  return State;
548 }
549 
550 void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
551  ExplodedNode *Pred,
552  const CallEvent &Call) {
553  ProgramStateRef State = Pred->getState();
554  ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
555  if (CleanedState == State) {
556  Dst.insert(Pred);
557  return;
558  }
559 
560  const Expr *E = Call.getOriginExpr();
561  const LocationContext *LC = Call.getLocationContext();
562  NodeBuilder B(Pred, Dst, *currBldrCtx);
563  static SimpleProgramPointTag Tag("ExprEngine",
564  "Finish argument construction");
565  PreStmt PP(E, LC, &Tag);
566  B.generateNode(PP, CleanedState, Pred);
567 }
568 
570  const CallEvent &Call) {
571  // WARNING: At this time, the state attached to 'Call' may be older than the
572  // state in 'Pred'. This is a minor optimization since CheckerManager will
573  // use an updated CallEvent instance when calling checkers, but if 'Call' is
574  // ever used directly in this function all callers should be updated to pass
575  // the most recent state. (It is probably not worth doing the work here since
576  // for some callers this will not be necessary.)
577 
578  // Run any pre-call checks using the generic call interface.
579  ExplodedNodeSet dstPreVisit;
580  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred,
581  Call, *this);
582 
583  // Actually evaluate the function call. We try each of the checkers
584  // to see if the can evaluate the function call, and get a callback at
585  // defaultEvalCall if all of them fail.
586  ExplodedNodeSet dstCallEvaluated;
587  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
588  Call, *this);
589 
590  // If there were other constructors called for object-type arguments
591  // of this call, clean them up.
592  ExplodedNodeSet dstArgumentCleanup;
593  for (auto I : dstCallEvaluated)
594  finishArgumentConstruction(dstArgumentCleanup, I, Call);
595 
596  // Finally, run any post-call checks.
597  getCheckerManager().runCheckersForPostCall(Dst, dstArgumentCleanup,
598  Call, *this);
599 }
600 
602  const LocationContext *LCtx,
603  ProgramStateRef State) {
604  const Expr *E = Call.getOriginExpr();
605  if (!E)
606  return State;
607 
608  // Some method families have known return values.
609  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
610  switch (Msg->getMethodFamily()) {
611  default:
612  break;
613  case OMF_autorelease:
614  case OMF_retain:
615  case OMF_self: {
616  // These methods return their receivers.
617  return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
618  }
619  }
620  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
621  SVal ThisV = C->getCXXThisVal();
622  ThisV = State->getSVal(ThisV.castAs<Loc>());
623  return State->BindExpr(E, LCtx, ThisV);
624  }
625 
626  SVal R;
627  QualType ResultTy = Call.getResultType();
628  unsigned Count = currBldrCtx->blockCount();
629  if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
630  // Conjure a temporary if the function returns an object by value.
631  SVal Target;
632  assert(RTC->getStmt() == Call.getOriginExpr());
633  EvalCallOptions CallOpts; // FIXME: We won't really need those.
634  std::tie(State, Target) =
635  prepareForObjectConstruction(Call.getOriginExpr(), State, LCtx,
636  RTC->getConstructionContext(), CallOpts);
637  const MemRegion *TargetR = Target.getAsRegion();
638  assert(TargetR);
639  // Invalidate the region so that it didn't look uninitialized. If this is
640  // a field or element constructor, we do not want to invalidate
641  // the whole structure. Pointer escape is meaningless because
642  // the structure is a product of conservative evaluation
643  // and therefore contains nothing interesting at this point.
645  ITraits.setTrait(TargetR,
647  State = State->invalidateRegions(TargetR, E, Count, LCtx,
648  /* CausesPointerEscape=*/false, nullptr,
649  &Call, &ITraits);
650 
651  R = State->getSVal(Target.castAs<Loc>(), E->getType());
652  } else {
653  // Conjure a symbol if the return value is unknown.
654 
655  // See if we need to conjure a heap pointer instead of
656  // a regular unknown pointer.
657  bool IsHeapPointer = false;
658  if (const auto *CNE = dyn_cast<CXXNewExpr>(E))
659  if (CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
660  // FIXME: Delegate this to evalCall in MallocChecker?
661  IsHeapPointer = true;
662  }
663 
664  R = IsHeapPointer ? svalBuilder.getConjuredHeapSymbolVal(E, LCtx, Count)
665  : svalBuilder.conjureSymbolVal(nullptr, E, LCtx, ResultTy,
666  Count);
667  }
668  return State->BindExpr(E, LCtx, R);
669 }
670 
671 // Conservatively evaluate call by invalidating regions and binding
672 // a conjured return value.
673 void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
674  ExplodedNode *Pred,
675  ProgramStateRef State) {
676  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
677  State = bindReturnValue(Call, Pred->getLocationContext(), State);
678 
679  // And make the result node.
680  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
681 }
682 
683 ExprEngine::CallInlinePolicy
684 ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
685  AnalyzerOptions &Opts,
686  const ExprEngine::EvalCallOptions &CallOpts) {
687  const LocationContext *CurLC = Pred->getLocationContext();
688  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
689  switch (Call.getKind()) {
690  case CE_Function:
691  case CE_Block:
692  break;
693  case CE_CXXMember:
696  return CIP_DisallowedAlways;
697  break;
698  case CE_CXXConstructor: {
700  return CIP_DisallowedAlways;
701 
702  const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);
703 
704  const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
705 
706  auto CCE = getCurrentCFGElement().getAs<CFGConstructor>();
707  const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
708  : nullptr;
709 
710  if (CC && isa<NewAllocatedObjectConstructionContext>(CC) &&
711  !Opts.MayInlineCXXAllocator)
712  return CIP_DisallowedOnce;
713 
714  // FIXME: We don't handle constructors or destructors for arrays properly.
715  // Even once we do, we still need to be careful about implicitly-generated
716  // initializers for array fields in default move/copy constructors.
717  // We still allow construction into ElementRegion targets when they don't
718  // represent array elements.
719  if (CallOpts.IsArrayCtorOrDtor)
720  return CIP_DisallowedOnce;
721 
722  // Inlining constructors requires including initializers in the CFG.
723  const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
724  assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
725  (void)ADC;
726 
727  // If the destructor is trivial, it's always safe to inline the constructor.
728  if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
729  break;
730 
731  // For other types, only inline constructors if destructor inlining is
732  // also enabled.
734  return CIP_DisallowedAlways;
735 
736  if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete) {
737  // If we don't handle temporary destructors, we shouldn't inline
738  // their constructors.
739  if (CallOpts.IsTemporaryCtorOrDtor &&
740  !Opts.ShouldIncludeTemporaryDtorsInCFG)
741  return CIP_DisallowedOnce;
742 
743  // If we did not find the correct this-region, it would be pointless
744  // to inline the constructor. Instead we will simply invalidate
745  // the fake temporary target.
747  return CIP_DisallowedOnce;
748 
749  // If the temporary is lifetime-extended by binding it to a reference-type
750  // field within an aggregate, automatic destructors don't work properly.
752  return CIP_DisallowedOnce;
753  }
754 
755  break;
756  }
757  case CE_CXXDestructor: {
759  return CIP_DisallowedAlways;
760 
761  // Inlining destructors requires building the CFG correctly.
762  const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
763  assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
764  (void)ADC;
765 
766  // FIXME: We don't handle constructors or destructors for arrays properly.
767  if (CallOpts.IsArrayCtorOrDtor)
768  return CIP_DisallowedOnce;
769 
770  // Allow disabling temporary destructor inlining with a separate option.
771  if (CallOpts.IsTemporaryCtorOrDtor &&
772  !Opts.MayInlineCXXTemporaryDtors)
773  return CIP_DisallowedOnce;
774 
775  // If we did not find the correct this-region, it would be pointless
776  // to inline the destructor. Instead we will simply invalidate
777  // the fake temporary target.
779  return CIP_DisallowedOnce;
780  break;
781  }
782  case CE_CXXAllocator:
783  if (Opts.MayInlineCXXAllocator)
784  break;
785  // Do not inline allocators until we model deallocators.
786  // This is unfortunate, but basically necessary for smart pointers and such.
787  return CIP_DisallowedAlways;
788  case CE_ObjCMessage:
789  if (!Opts.MayInlineObjCMethod)
790  return CIP_DisallowedAlways;
791  if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
793  return CIP_DisallowedAlways;
794  break;
795  }
796 
797  return CIP_Allowed;
798 }
799 
800 /// Returns true if the given C++ class contains a member with the given name.
801 static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
802  StringRef Name) {
803  const IdentifierInfo &II = Ctx.Idents.get(Name);
804  DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
805  if (!RD->lookup(DeclName).empty())
806  return true;
807 
808  CXXBasePaths Paths(false, false, false);
809  if (RD->lookupInBases(
810  [DeclName](const CXXBaseSpecifier *Specifier, CXXBasePath &Path) {
811  return CXXRecordDecl::FindOrdinaryMember(Specifier, Path, DeclName);
812  },
813  Paths))
814  return true;
815 
816  return false;
817 }
818 
819 /// Returns true if the given C++ class is a container or iterator.
820 ///
821 /// Our heuristic for this is whether it contains a method named 'begin()' or a
822 /// nested type named 'iterator' or 'iterator_category'.
823 static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
824  return hasMember(Ctx, RD, "begin") ||
825  hasMember(Ctx, RD, "iterator") ||
826  hasMember(Ctx, RD, "iterator_category");
827 }
828 
829 /// Returns true if the given function refers to a method of a C++ container
830 /// or iterator.
831 ///
832 /// We generally do a poor job modeling most containers right now, and might
833 /// prefer not to inline their methods.
834 static bool isContainerMethod(const ASTContext &Ctx,
835  const FunctionDecl *FD) {
836  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
837  return isContainerClass(Ctx, MD->getParent());
838  return false;
839 }
840 
841 /// Returns true if the given function is the destructor of a class named
842 /// "shared_ptr".
843 static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
844  const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
845  if (!Dtor)
846  return false;
847 
848  const CXXRecordDecl *RD = Dtor->getParent();
849  if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
850  if (II->isStr("shared_ptr"))
851  return true;
852 
853  return false;
854 }
855 
856 /// Returns true if the function in \p CalleeADC may be inlined in general.
857 ///
858 /// This checks static properties of the function, such as its signature and
859 /// CFG, to determine whether the analyzer should ever consider inlining it,
860 /// in any context.
861 bool ExprEngine::mayInlineDecl(AnalysisDeclContext *CalleeADC) const {
862  AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
863  // FIXME: Do not inline variadic calls.
864  if (CallEvent::isVariadic(CalleeADC->getDecl()))
865  return false;
866 
867  // Check certain C++-related inlining policies.
868  ASTContext &Ctx = CalleeADC->getASTContext();
869  if (Ctx.getLangOpts().CPlusPlus) {
870  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
871  // Conditionally control the inlining of template functions.
872  if (!Opts.MayInlineTemplateFunctions)
873  if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
874  return false;
875 
876  // Conditionally control the inlining of C++ standard library functions.
877  if (!Opts.MayInlineCXXStandardLibrary)
878  if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
880  return false;
881 
882  // Conditionally control the inlining of methods on objects that look
883  // like C++ containers.
884  if (!Opts.MayInlineCXXContainerMethods)
885  if (!AMgr.isInCodeFile(FD->getLocation()))
886  if (isContainerMethod(Ctx, FD))
887  return false;
888 
889  // Conditionally control the inlining of the destructor of C++ shared_ptr.
890  // We don't currently do a good job modeling shared_ptr because we can't
891  // see the reference count, so treating as opaque is probably the best
892  // idea.
893  if (!Opts.MayInlineCXXSharedPtrDtor)
894  if (isCXXSharedPtrDtor(FD))
895  return false;
896  }
897  }
898 
899  // It is possible that the CFG cannot be constructed.
900  // Be safe, and check if the CalleeCFG is valid.
901  const CFG *CalleeCFG = CalleeADC->getCFG();
902  if (!CalleeCFG)
903  return false;
904 
905  // Do not inline large functions.
906  if (isHuge(CalleeADC))
907  return false;
908 
909  // It is possible that the live variables analysis cannot be
910  // run. If so, bail out.
911  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
912  return false;
913 
914  return true;
915 }
916 
917 bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
918  const ExplodedNode *Pred,
919  const EvalCallOptions &CallOpts) {
920  if (!D)
921  return false;
922 
923  AnalysisManager &AMgr = getAnalysisManager();
924  AnalyzerOptions &Opts = AMgr.options;
925  AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
926  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);
927 
928  // The auto-synthesized bodies are essential to inline as they are
929  // usually small and commonly used. Note: we should do this check early on to
930  // ensure we always inline these calls.
931  if (CalleeADC->isBodyAutosynthesized())
932  return true;
933 
934  if (!AMgr.shouldInlineCall())
935  return false;
936 
937  // Check if this function has been marked as non-inlinable.
938  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
939  if (MayInline.hasValue()) {
940  if (!MayInline.getValue())
941  return false;
942 
943  } else {
944  // We haven't actually checked the static properties of this function yet.
945  // Do that now, and record our decision in the function summaries.
946  if (mayInlineDecl(CalleeADC)) {
947  Engine.FunctionSummaries->markMayInline(D);
948  } else {
949  Engine.FunctionSummaries->markShouldNotInline(D);
950  return false;
951  }
952  }
953 
954  // Check if we should inline a call based on its kind.
955  // FIXME: this checks both static and dynamic properties of the call, which
956  // means we're redoing a bit of work that could be cached in the function
957  // summary.
958  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
959  if (CIP != CIP_Allowed) {
960  if (CIP == CIP_DisallowedAlways) {
961  assert(!MayInline.hasValue() || MayInline.getValue());
962  Engine.FunctionSummaries->markShouldNotInline(D);
963  }
964  return false;
965  }
966 
967  // Do not inline if recursive or we've reached max stack frame count.
968  bool IsRecursive = false;
969  unsigned StackDepth = 0;
970  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
971  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
972  (!isSmall(CalleeADC) || IsRecursive))
973  return false;
974 
975  // Do not inline large functions too many times.
976  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
977  Opts.MaxTimesInlineLarge) &&
978  isLarge(CalleeADC)) {
979  NumReachedInlineCountMax++;
980  return false;
981  }
982 
983  if (HowToInline == Inline_Minimal && (!isSmall(CalleeADC) || IsRecursive))
984  return false;
985 
986  return true;
987 }
988 
989 static bool isTrivialObjectAssignment(const CallEvent &Call) {
990  const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
991  if (!ICall)
992  return false;
993 
994  const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
995  if (!MD)
996  return false;
998  return false;
999 
1000  return MD->isTrivial();
1001 }
1002 
1004  const CallEvent &CallTemplate,
1005  const EvalCallOptions &CallOpts) {
1006  // Make sure we have the most recent state attached to the call.
1007  ProgramStateRef State = Pred->getState();
1008  CallEventRef<> Call = CallTemplate.cloneWithState(State);
1009 
1010  // Special-case trivial assignment operators.
1011  if (isTrivialObjectAssignment(*Call)) {
1012  performTrivialCopy(Bldr, Pred, *Call);
1013  return;
1014  }
1015 
1016  // Try to inline the call.
1017  // The origin expression here is just used as a kind of checksum;
1018  // this should still be safe even for CallEvents that don't come from exprs.
1019  const Expr *E = Call->getOriginExpr();
1020 
1021  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
1022  if (InlinedFailedState) {
1023  // If we already tried once and failed, make sure we don't retry later.
1024  State = InlinedFailedState;
1025  } else {
1026  RuntimeDefinition RD = Call->getRuntimeDefinition();
1027  const Decl *D = RD.getDecl();
1028  if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
1029  if (RD.mayHaveOtherDefinitions()) {
1031 
1032  // Explore with and without inlining the call.
1033  if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
1034  BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
1035  return;
1036  }
1037 
1038  // Don't inline if we're not in any dynamic dispatch mode.
1039  if (Options.getIPAMode() != IPAK_DynamicDispatch) {
1040  conservativeEvalCall(*Call, Bldr, Pred, State);
1041  return;
1042  }
1043  }
1044 
1045  // We are not bifurcating and we do have a Decl, so just inline.
1046  if (inlineCall(*Call, D, Bldr, Pred, State))
1047  return;
1048  }
1049  }
1050 
1051  // If we can't inline it, handle the return value and invalidate the regions.
1052  conservativeEvalCall(*Call, Bldr, Pred, State);
1053 }
1054 
1055 void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
1056  const CallEvent &Call, const Decl *D,
1057  NodeBuilder &Bldr, ExplodedNode *Pred) {
1058  assert(BifurReg);
1059  BifurReg = BifurReg->StripCasts();
1060 
1061  // Check if we've performed the split already - note, we only want
1062  // to split the path once per memory region.
1063  ProgramStateRef State = Pred->getState();
1064  const unsigned *BState =
1065  State->get<DynamicDispatchBifurcationMap>(BifurReg);
1066  if (BState) {
1067  // If we are on "inline path", keep inlining if possible.
1068  if (*BState == DynamicDispatchModeInlined)
1069  if (inlineCall(Call, D, Bldr, Pred, State))
1070  return;
1071  // If inline failed, or we are on the path where we assume we
1072  // don't have enough info about the receiver to inline, conjure the
1073  // return value and invalidate the regions.
1074  conservativeEvalCall(Call, Bldr, Pred, State);
1075  return;
1076  }
1077 
1078  // If we got here, this is the first time we process a message to this
1079  // region, so split the path.
1080  ProgramStateRef IState =
1081  State->set<DynamicDispatchBifurcationMap>(BifurReg,
1082  DynamicDispatchModeInlined);
1083  inlineCall(Call, D, Bldr, Pred, IState);
1084 
1085  ProgramStateRef NoIState =
1086  State->set<DynamicDispatchBifurcationMap>(BifurReg,
1087  DynamicDispatchModeConservative);
1088  conservativeEvalCall(Call, Bldr, Pred, NoIState);
1089 
1090  NumOfDynamicDispatchPathSplits++;
1091 }
1092 
1094  ExplodedNodeSet &Dst) {
1095  ExplodedNodeSet dstPreVisit;
1096  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
1097 
1098  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);
1099 
1100  if (RS->getRetValue()) {
1101  for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
1102  ei = dstPreVisit.end(); it != ei; ++it) {
1103  B.generateNode(RS, *it, (*it)->getState());
1104  }
1105  }
1106 }
Represents a function declaration or definition.
Definition: Decl.h:1748
unsigned InlineMaxStackDepth
The inlining stack depth limit.
bool empty() const
Definition: CFG.h:732
A (possibly-)qualified type.
Definition: Type.h:643
MemRegion - The root abstract class for all memory regions.
Definition: MemRegion.h:94
CallEventRef< T > cloneWithState(ProgramStateRef NewState) const
Returns a copy of this CallEvent, but using the given state.
const CXXConstructorDecl * getDecl() const override
Definition: CallEvent.h:845
bool IsTemporaryCtorOrDtor
This call is a constructor or a destructor of a temporary value.
Definition: ExprEngine.h:106
Stmt * getBody() const
Get the body of the Declaration.
succ_iterator succ_begin()
Definition: CFG.h:769
void VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitCall - Transfer function for function calls.
void processCallExit(ExplodedNode *Pred) override
Generate the sequence of nodes that simulate the call exit and the post visit for CallExpr...
Stmt - This represents one statement.
Definition: Stmt.h:66
Information about invalidation for a particular region/symbol.
Definition: MemRegion.h:1439
This builder class is useful for generating nodes that resulted from visiting a statement.
Definition: CoreEngine.h:378
ProgramPoint getProgramPoint(bool IsPreVisit=false, const ProgramPointTag *Tag=nullptr) const
Returns an appropriate ProgramPoint for this call.
Definition: CallEvent.cpp:338
unsigned size() const
Return the total number of CFGBlocks within the CFG This is simply a renaming of the getNumBlockIDs()...
Definition: CFG.h:1207
Decl - This represents one declaration (or definition), e.g.
Definition: DeclBase.h:88
Represents a point when we begin processing an inlined call.
Definition: ProgramPoint.h:630
Manages the lifetime of CallEvent objects.
Definition: CallEvent.h:1133
static bool isContainerMethod(const ASTContext &Ctx, const FunctionDecl *FD)
Returns true if the given function refers to a method of a C++ container or iterator.
IntrusiveRefCntPtr< const ProgramState > ProgramStateRef
bool isDerivedFrom(const CXXRecordDecl *Base) const
Determine whether this class is derived from the class Base.
Hints for figuring out of a call should be inlined during evalCall().
Definition: ExprEngine.h:96
Represents a call to a C++ constructor.
Definition: ExprCXX.h:1331
bool IsArrayCtorOrDtor
This call is a constructor or a destructor for a single element within an array, a part of array cons...
Definition: ExprEngine.h:103
const NestedNameSpecifier * Specifier
CallEventRef getSimpleCall(const CallExpr *E, ProgramStateRef State, const LocationContext *LCtx)
Definition: CallEvent.cpp:1355
const ProgramStateRef & getState() const
SVal evalCast(SVal val, QualType castTy, QualType originalType)
Represents a path from a specific derived class (which is not represented as part of the path) to a p...
static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call, const StackFrameContext *calleeCtx)
static std::pair< const Stmt *, const CFGBlock * > getLastStmt(const ExplodedNode *Node)
unsigned succ_size() const
Definition: CFG.h:787
const Expr * getOriginExpr() const
Returns the expression whose value will be the result of this call.
Definition: CallEvent.h:222
ASTContext & getASTContext() const
loc::MemRegionVal getCXXThis(const CXXMethodDecl *D, const StackFrameContext *SFC)
Return a memory region for the 'this' object reference.
void setTrait(SymbolRef Sym, InvalidationKinds IK)
Definition: MemRegion.cpp:1584
static Optional< SVal > getObjectUnderConstruction(ProgramStateRef State, const ConstructionContextItem &Item, const LocationContext *LC)
By looking at a certain item that may be potentially part of an object's ConstructionContext, retrieve such object's location.
Definition: ExprEngine.cpp:474
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD, StringRef Name)
Returns true if the given C++ class contains a member with the given name.
void enqueue(ExplodedNodeSet &Set)
Enqueue the given set of nodes onto the work list.
Definition: CoreEngine.cpp:577
void removeDead(ExplodedNode *Node, ExplodedNodeSet &Out, const Stmt *ReferenceStmt, const LocationContext *LC, const Stmt *DiagnosticStmt=nullptr, ProgramPoint::Kind K=ProgramPoint::PreStmtPurgeDeadSymbolsKind)
Run the analyzer's garbage collection - remove dead symbols and bindings from the state...
Definition: ExprEngine.cpp:689
bool isMoveAssignmentOperator() const
Determine whether this is a move assignment operator.
Definition: DeclCXX.cpp:2204
const CFGBlock * getEntry() const
Returns the entry block in the CFG for the entered function.
Definition: ProgramPoint.h:645
DeclarationName getDeclName() const
Get the actual, stored name of the declaration, which may be a special name.
Definition: Decl.h:297
One of these records is kept for each identifier that is lexed.
Holds long-lived AST nodes (such as types and decls) that can be referred to throughout the semantic ...
Definition: ASTContext.h:154
LineState State
AnalysisDeclContext contains the context data for the function or method under analysis.
void addPredecessor(ExplodedNode *V, ExplodedGraph &G)
addPredeccessor - Adds a predecessor to the current node, and in tandem add this node as a successor ...
void runCheckersForPostObjCMessage(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const ObjCMethodCall &msg, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting obj-c messages.
i32 captured_struct **param SharedsTy A type which contains references the shared variables *param Shareds Context with the list of shared variables from the p *TaskFunction *param Data Additional data for task generation like final * state
const StackFrameContext * getStackFrame() const
Definition: ProgramPoint.h:183
IdentifierTable & Idents
Definition: ASTContext.h:569
STATISTIC(NumOfDynamicDispatchPathSplits, "The # of times we split the path due to imprecise dynamic dispatch info")
Represents any expression that calls an Objective-C method.
Definition: CallEvent.h:937
virtual Kind getKind() const =0
Returns the kind of call this is.
static bool isInStdNamespace(const Decl *D)
Returns true if the root namespace of the given declaration is the &#39;std&#39; C++ namespace.
WorkList * getWorkList() const
Definition: CoreEngine.h:171
void runCheckersForPreCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng)
Run checkers for pre-visiting obj-c messages.
CFGElement getCurrentCFGElement()
Return the CFG element corresponding to the worklist element that is currently being processed by Exp...
Definition: ExprEngine.h:659
T * getAnalysis()
Return the specified analysis object, lazily running the analysis if necessary.
const LocationContext * getLocationContext() const
const LocationContext * getParent() const
virtual const CXXConstructExpr * getOriginExpr() const
Definition: CallEvent.h:841
bool isLinear() const
Returns true if the CFG has no branches.
Definition: CFG.cpp:4794
void VisitReturnStmt(const ReturnStmt *R, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitReturnStmt - Transfer function logic for return statements.
const CoreEngine & getCoreEngine() const
Definition: ExprEngine.h:408
bool hasTrivialDestructor() const
Determine whether this class has a trivial destructor (C++ [class.dtor]p3)
Definition: DeclCXX.h:1491
void runCheckersForPostCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting obj-c messages.
static bool isCXXSharedPtrDtor(const FunctionDecl *FD)
Returns true if the given function is the destructor of a class named "shared_ptr".
static bool isVariadic(const Decl *D)
Returns true if the given decl is known to be variadic.
Definition: CallEvent.cpp:487
virtual Decl * getCanonicalDecl()
Retrieves the "canonical" declaration of the given declaration.
Definition: DeclBase.h:877
lookup_result lookup(DeclarationName Name) const
lookup - Find the declarations (if any) with the given Name in this context.
Definition: DeclBase.cpp:1602
AnalysisDeclContext * getAnalysisDeclContext(const Decl *D)
CheckerManager & getCheckerManager() const
Definition: ExprEngine.h:190
void removeDeadOnEndOfFunction(NodeBuilderContext &BC, ExplodedNode *Pred, ExplodedNodeSet &Dst)
Remove dead bindings/symbols before exiting a function.
static bool isTrivialObjectAssignment(const CallEvent &Call)
ProgramStateRef bindReturnValue(const CallEvent &Call, const LocationContext *LCtx, ProgramStateRef State)
Create a new state in which the call return value is binded to the call origin expression.
Represents a non-static C++ member function call, no matter how it is written.
Definition: CallEvent.h:637
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy, StoreManager &StoreMgr)
Adjusts a return value when the called function's return type does not match the caller's expression ...
DeclarationNameTable DeclarationNames
Definition: ASTContext.h:572
Represents a single basic block in a source-level CFG.
Definition: CFG.h:570
Represents a point when we finish the call exit sequence (for inlined call).
Definition: ProgramPoint.h:688
AnalysisDeclContext * getContext(const Decl *D)
void runCheckersForPostStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting Stmts.
const LocationContext * getLocationContext() const
The context in which the call is being evaluated.
Definition: CallEvent.h:212
void processBeginOfFunction(NodeBuilderContext &BC, ExplodedNode *Pred, ExplodedNodeSet &Dst, const BlockEdge &L) override
Called by CoreEngine.
This represents one expression.
Definition: Expr.h:108
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt...
Definition: CFG.h:1035
bool isInSystemHeader(SourceLocation Loc) const
Returns if a SourceLocation is in a system header.
const CXXRecordDecl * getPointeeCXXRecordDecl() const
If this is a pointer or reference to a RecordType, return the CXXRecordDecl that the type refers to...
Definition: Type.cpp:1621
CallEventRef getCaller(const StackFrameContext *CalleeCtx, ProgramStateRef State)
Gets an outside caller given a callee context.
Definition: CallEvent.cpp:1376
#define V(N, I)
Definition: ASTContext.h:2907
Represents a C++ destructor within a class.
Definition: DeclCXX.h:2838
AnalyzerOptions & getAnalyzerOptions() override
This is the simplest builder which generates nodes in the ExplodedGraph.
Definition: CoreEngine.h:236
Represents C++ constructor call.
Definition: CFG.h:150
void Add(ExplodedNode *N)
Refers to regular member function and operator calls.
IdentifierInfo * getAsIdentifierInfo() const
Retrieve the IdentifierInfo * stored in this declaration name, or null if this declaration name isn't...
Refers to constructors (implicit or explicit).
QualType getType() const
Definition: Expr.h:137
void runCheckersForEvalCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &CE, ExprEngine &Eng)
Run checkers for evaluating a call.
ASTContext & getContext() const
getContext - Return the ASTContext associated with this analysis.
Definition: ExprEngine.h:182
ReturnStmt - This represents a return, optionally of an expression: return; return 4;...
Definition: Stmt.h:2610
bool isBodyAutosynthesized() const
Checks if the body of the Decl is generated by the BodyFarm.
ExplodedNode * getNode(const ProgramPoint &L, ProgramStateRef State, bool IsSink=false, bool *IsNew=nullptr)
Retrieve the node associated with a (Location,State) pair, where the &#39;Location&#39; is a ProgramPoint in ...
const StackFrameContext * getStackFrame(LocationContext const *Parent, const Stmt *S, const CFGBlock *Blk, unsigned Idx)
bool isTrivial() const
Whether this function is "trivial" in some specialized C++ senses.
Definition: Decl.h:2040
Enable inlining of dynamically dispatched methods.
bool isNull() const
Return true if this QualType doesn&#39;t point to a type yet.
Definition: Type.h:708
IPAKind getIPAMode() const
Returns the inter-procedural analysis mode.
const MemRegion * getRegion() const
Get the underlining region.
Definition: SVals.h:605
While alive, includes the current analysis stack in a crash trace.
CanQualType getCanonicalTypeUnqualified() const
void runCheckersForNewAllocator(const CXXNewExpr *NE, SVal Target, ExplodedNodeSet &Dst, ExplodedNode *Pred, ExprEngine &Eng, bool wasInlined=false)
Run checkers between C++ operator new and constructor calls.
Defines the runtime definition of the called function.
Definition: CallEvent.h:101
QualType getCanonicalType() const
Definition: Type.h:6181
const FunctionDecl * getDecl() const override
Definition: CallEvent.cpp:665
IdentifierInfo & get(StringRef Name)
Return the identifier token info for the specified named identifier.
Expr * getRetValue()
Definition: Stmt.h:2643
AnalysisManager & getAnalysisManager() override
Definition: ExprEngine.h:184
const MemRegion * getAsRegion() const
Definition: SVals.cpp:151
Represents a new-expression for memory allocation and constructor calls, e.g: "new CXXNewExpr(foo)"...
Definition: ExprCXX.h:2005
CallEventManager & getCallEventManager()
Definition: ProgramState.h:526
REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap, const MemRegion *, unsigned) bool ExprEngine
void evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred, const CallEvent &Call)
Evaluate a call, running pre- and post-call checks and allowing checkers to be responsible for handli...
Represents a static or instance method of a struct/union/class.
Definition: DeclCXX.h:2114
bool IsCtorOrDtorWithImproperlyModeledTargetRegion
This call is a constructor or a destructor for which we do not currently compute the this-region corr...
Definition: ExprEngine.h:99
SVal - This represents a symbolic expression, which can be either an L-value or an R-value...
Definition: SVals.h:75
DeclarationName getIdentifier(const IdentifierInfo *ID)
Create a declaration name that is a simple identifier.
const Decl * getDecl() const
bool isObjCObjectPointerType() const
Definition: Type.h:6488
Do minimal inlining of callees.
Definition: ExprEngine.h:92
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Definition: CFG.h:1202
Refers to destructors (implicit or explicit).
void runCheckersForPreStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng)
Run checkers for pre-visiting Stmts.
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD)
Returns true if the given C++ class is a container or iterator.
const MemRegion * getDispatchRegion()
When other definitions are possible, returns the region whose runtime type determines the method defi...
Definition: CallEvent.h:126
void insert(const ExplodedNodeSet &S)
Optional< T > getAs() const
Convert to the specified CFGElement type, returning None if this CFGElement is not of the desired typ...
Definition: CFG.h:109
ast_type_traits::DynTypedNode Node
pred_iterator pred_begin()
Definition: CFG.h:751
static ProgramStateRef getInlineFailedState(ProgramStateRef State, const Stmt *CallE)
Dataflow Directional Tag Classes.
CFG::BuildOptions & getCFGBuildOptions()
Return the build options used to construct the CFG.
StoreManager & getStoreManager()
Definition: ExprEngine.h:386
const StackFrameContext * getCalleeContext() const
Definition: ProgramPoint.h:640
The name of a declaration.
const CXXRecordDecl * getParent() const
Return the parent of this method declaration, which is the class in which this method is defined...
Definition: DeclCXX.h:2237
const MemRegion * StripCasts(bool StripBaseAndDerivedCasts=true) const
Definition: MemRegion.cpp:1196
bool isAmbiguous(CanQualType BaseType)
Determine whether the path from the most-derived type to the given base type is ambiguous (i...
bool isCopyAssignmentOperator() const
Determine whether this is a copy-assignment operator, regardless of whether it was declared implicitl...
Definition: DeclCXX.cpp:2183
bool mayHaveOtherDefinitions()
Check if the definition we have is precise.
Definition: CallEvent.h:122
Represents an abstract call to a function or method along a particular path.
Definition: CallEvent.h:138
ProgramStateManager & getStateManager() override
Definition: ExprEngine.h:384
const Decl * getDecl() const
void defaultEvalCall(NodeBuilder &B, ExplodedNode *Pred, const CallEvent &Call, const EvalCallOptions &CallOpts={})
Default implementation of call evaluation.
bool lookupInBases(BaseMatchesCallback BaseMatches, CXXBasePaths &Paths, bool LookupInDependent=false) const
Look for entities within the base classes of this C++ class, transitively searching all base class su...
T castAs() const
Convert to the specified SVal type, asserting that this SVal is of the desired type.
Definition: SVals.h:103
virtual unsigned getASTArgumentIndex(unsigned CallArgumentIndex) const
Some call event sub-classes conveniently adjust mismatching AST indices to match parameter indices...
Definition: CallEvent.h:419
static QualType getDeclaredResultType(const Decl *D)
Returns the result type of a function or method declaration.
Definition: CallEvent.cpp:458
bool mayInlineCXXMemberFunction(CXXInlineableMemberKind K) const
Returns the option controlling which C++ member functions will be considered for inlining.
CXXBasePath & front()
const StackFrameContext * getStackFrame() const
Represents a base class of a C++ class.
Definition: DeclCXX.h:192
Stores options for the analyzer from the command line.
SourceManager & getSourceManager()
Definition: ASTContext.h:665
QualType getResultType() const
Returns the result type, adjusted for references.
Definition: CallEvent.cpp:69
ExplodedNode * generateNode(const ProgramPoint &PP, ProgramStateRef State, ExplodedNode *Pred)
Generates a node in the ExplodedGraph.
Definition: CoreEngine.h:289
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate.h) and friends (in DeclFriend.h).
ConstructionContext's subclasses describe different ways of constructing an object in C++...
Represents a C++ struct/union/class.
Definition: DeclCXX.h:300
bool IsTemporaryLifetimeExtendedViaAggregate
This call is a constructor for a temporary that is lifetime-extended by binding it to a reference-typ...
Definition: ExprEngine.h:111
virtual unsigned getNumArgs() const =0
Returns the number of arguments (explicit and implicit).
QualType getPointerType(QualType T) const
Return the uniqued reference to the type for a pointer to the specified type.
CallExpr - Represents a function call (C99 6.5.2.2, C++ [expr.call]).
Definition: Expr.h:2516
ExplodedNode * generateNode(const Stmt *S, ExplodedNode *Pred, ProgramStateRef St, const ProgramPointTag *tag=nullptr, ProgramPoint::Kind K=ProgramPoint::PostStmtKind)
Definition: CoreEngine.h:407
ProgramStateRef invalidateRegions(unsigned BlockCount, ProgramStateRef Orig=nullptr) const
Returns a new state with all argument regions invalidated.
Definition: CallEvent.cpp:284
virtual void enqueue(const WorkListUnit &U)=0
const BlockInvocationContext * getBlockInvocationContext(const LocationContext *parent, const BlockDecl *BD, const void *ContextData)
BasePaths - Represents the set of paths from a derived class to one of its (direct or indirect) bases...
const StackFrameContext * getStackFrame() const
AnalysisPurgeMode AnalysisPurgeOpt
Enable inlining of dynamically dispatched methods, bifurcate paths when exact type info is unavailabl...
Optional< T > getAs() const
Convert to the specified ProgramPoint type, returning None if this ProgramPoint is not of the desired...
Definition: ProgramPoint.h:151
AnalysisDeclContext * getAnalysisDeclContext() const
Represents a call to a C++ constructor.
Definition: CallEvent.h:816
const LangOptions & getLangOpts() const
Definition: ASTContext.h:710
void processCallEnter(NodeBuilderContext &BC, CallEnter CE, ExplodedNode *Pred) override
Generate the entry node of the callee.
CallEventRef< T > cloneWithState(ProgramStateRef State) const
Definition: CallEvent.h:82