Bug Summary

File: tools/clang/lib/StaticAnalyzer/Core/ExprEngineCallAndReturn.cpp
Warning: line 164, column 26
Returning null reference
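The path below ends in 'IntrusiveRefCntPtr::operator*', which the analyzer believes may dereference a null 'Obj' field, so the returned reference would be a null reference. A minimal sketch of the diagnosed pattern, using a hypothetical RefPtr type rather than the real IntrusiveRefCntPtr:

    template <typename T> struct RefPtr {
      T *Obj = nullptr;                      // cf. "Null pointer value stored to field 'Obj'"
      T &operator*() const { return *Obj; } // if Obj is null, this returns a null reference
    };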

Annotated Source Code

clang -cc1 -triple x86_64-pc-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name ExprEngineCallAndReturn.cpp -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-eagerly-assume -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -mrelocation-model pic -pic-level 2 -mthread-model posix -relaxed-aliasing -fmath-errno -masm-verbose -mconstructor-aliases -munwind-tables -fuse-init-array -target-cpu x86-64 -dwarf-column-info -debugger-tuning=gdb -momit-leaf-frame-pointer -ffunction-sections -fdata-sections -resource-dir /usr/lib/llvm-7/lib/clang/7.0.0 -D _DEBUG -D _GNU_SOURCE -D __STDC_CONSTANT_MACROS -D __STDC_FORMAT_MACROS -D __STDC_LIMIT_MACROS -I /build/llvm-toolchain-snapshot-7~svn326551/build-llvm/tools/clang/lib/StaticAnalyzer/Core -I /build/llvm-toolchain-snapshot-7~svn326551/tools/clang/lib/StaticAnalyzer/Core -I /build/llvm-toolchain-snapshot-7~svn326551/tools/clang/include -I /build/llvm-toolchain-snapshot-7~svn326551/build-llvm/tools/clang/include -I /build/llvm-toolchain-snapshot-7~svn326551/build-llvm/include -I /build/llvm-toolchain-snapshot-7~svn326551/include -U NDEBUG -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.3.0/../../../../include/c++/7.3.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.3.0/../../../../include/x86_64-linux-gnu/c++/7.3.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.3.0/../../../../include/x86_64-linux-gnu/c++/7.3.0 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/7.3.0/../../../../include/c++/7.3.0/backward -internal-isystem /usr/include/clang/7.0.0/include/ -internal-isystem /usr/local/include -internal-isystem /usr/lib/llvm-7/lib/clang/7.0.0/include -internal-externc-isystem /usr/include/x86_64-linux-gnu -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-unused-parameter -Wwrite-strings -Wno-missing-field-initializers -Wno-long-long -Wno-maybe-uninitialized -Wno-comment -std=c++11 -fdeprecated-macro -fdebug-compilation-dir /build/llvm-toolchain-snapshot-7~svn326551/build-llvm/tools/clang/lib/StaticAnalyzer/Core -ferror-limit 19 -fmessage-length 0 -fvisibility-inlines-hidden -fobjc-runtime=gcc -fno-common -fdiagnostics-show-option -vectorize-loops -vectorize-slp -analyzer-checker optin.performance.Padding -analyzer-output=html -analyzer-config stable-report-filename=true -o /tmp/scan-build-2018-03-02-155150-1477-1 -x c++ /build/llvm-toolchain-snapshot-7~svn326551/tools/clang/lib/StaticAnalyzer/Core/ExprEngineCallAndReturn.cpp

/build/llvm-toolchain-snapshot-7~svn326551/tools/clang/lib/StaticAnalyzer/Core/ExprEngineCallAndReturn.cpp

1//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file defines ExprEngine's support for calls and returns.
11//
12//===----------------------------------------------------------------------===//
13
14#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
15#include "PrettyStackTraceLocationContext.h"
16#include "clang/AST/CXXInheritance.h"
17#include "clang/AST/DeclCXX.h"
18#include "clang/Analysis/Analyses/LiveVariables.h"
19#include "clang/Analysis/ConstructionContext.h"
20#include "clang/StaticAnalyzer/Core/CheckerManager.h"
21#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
22#include "llvm/ADT/SmallSet.h"
23#include "llvm/ADT/Statistic.h"
24#include "llvm/Support/SaveAndRestore.h"
25
26using namespace clang;
27using namespace ento;
28
29#define DEBUG_TYPE "ExprEngine"
30
31STATISTIC(NumOfDynamicDispatchPathSplits,
32  "The # of times we split the path due to imprecise dynamic dispatch info");
33
34STATISTIC(NumInlinedCalls,
35  "The # of times we inlined a call");
36
37STATISTIC(NumReachedInlineCountMax,
38  "The # of times we reached inline count maximum");
39
40void ExprEngine::processCallEnter(NodeBuilderContext& BC, CallEnter CE,
41 ExplodedNode *Pred) {
42 // Get the entry block in the CFG of the callee.
43 const StackFrameContext *calleeCtx = CE.getCalleeContext();
44 PrettyStackTraceLocationContext CrashInfo(calleeCtx);
45 const CFGBlock *Entry = CE.getEntry();
46
47 // Validate the CFG.
48 assert(Entry->empty());
49 assert(Entry->succ_size() == 1);
50
51 // Get the solitary successor.
52 const CFGBlock *Succ = *(Entry->succ_begin());
53
54 // Construct an edge representing the starting location in the callee.
55 BlockEdge Loc(Entry, Succ, calleeCtx);
56
57 ProgramStateRef state = Pred->getState();
58
59 // Construct a new node, notify checkers that analysis of the function has
60 // begun, and add the resultant nodes to the worklist.
61 bool isNew;
62 ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
63 Node->addPredecessor(Pred, G);
64 if (isNew) {
65 ExplodedNodeSet DstBegin;
66 processBeginOfFunction(BC, Node, DstBegin, Loc);
67 Engine.enqueue(DstBegin);
68 }
69}
70
71// Find the last statement on the path to the exploded node and the
72// corresponding Block.
73static std::pair<const Stmt*,
74 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
75 const Stmt *S = nullptr;
76 const CFGBlock *Blk = nullptr;
77 const StackFrameContext *SF =
78 Node->getLocation().getLocationContext()->getCurrentStackFrame();
79
80 // Back up through the ExplodedGraph until we reach a statement node in this
81 // stack frame.
82 while (Node) {
83 const ProgramPoint &PP = Node->getLocation();
84
85 if (PP.getLocationContext()->getCurrentStackFrame() == SF) {
86 if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
87 S = SP->getStmt();
88 break;
89 } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
90 S = CEE->getCalleeContext()->getCallSite();
91 if (S)
92 break;
93
94 // If there is no statement, this is an implicitly-generated call.
95 // We'll walk backwards over it and then continue the loop to find
96 // an actual statement.
97 Optional<CallEnter> CE;
98 do {
99 Node = Node->getFirstPred();
100 CE = Node->getLocationAs<CallEnter>();
101 } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());
102
103 // Continue searching the graph.
104 } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
105 Blk = BE->getSrc();
106 }
107 } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
108 // If we reached the CallEnter for this function, it has no statements.
109 if (CE->getCalleeContext() == SF)
110 break;
111 }
112
113 if (Node->pred_empty())
114 return std::make_pair(nullptr, nullptr);
115
116 Node = *Node->pred_begin();
117 }
118
119 return std::make_pair(S, Blk);
120}
121
122/// Adjusts a return value when the called function's return type does not
123/// match the caller's expression type. This can happen when a dynamic call
124/// is devirtualized, and the overriding method has a covariant (more specific)
125/// return type than the parent's method. For C++ objects, this means we need
126/// to add base casts.
127static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
128 StoreManager &StoreMgr) {
129 // For now, the only adjustments we handle apply only to locations.
130 if (!V.getAs<Loc>())
131 return V;
132
133 // If the types already match, don't do any unnecessary work.
134 ExpectedTy = ExpectedTy.getCanonicalType();
135 ActualTy = ActualTy.getCanonicalType();
136 if (ExpectedTy == ActualTy)
137 return V;
138
139 // No adjustment is needed between Objective-C pointer types.
140 if (ExpectedTy->isObjCObjectPointerType() &&
141 ActualTy->isObjCObjectPointerType())
142 return V;
143
144 // C++ object pointers may need "derived-to-base" casts.
145 const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
146 const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
147 if (ExpectedClass && ActualClass) {
148 CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
149 /*DetectVirtual=*/false);
150 if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
151 !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
152 return StoreMgr.evalDerivedToBase(V, Paths.front());
153 }
154 }
155
156 // Unfortunately, Objective-C does not enforce that overridden methods have
157 // covariant return types, so we can't assert that that never happens.
158 // Be safe and return UnknownVal().
159 return UnknownVal();
160}
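For illustration, this is the covariant-return situation the comment above describes, sketched with hypothetical Base/Derived classes:

    struct Base {
      virtual Base *clone() const { return nullptr; } // caller sees Base *
      virtual ~Base() {}
    };
    struct Derived : Base {
      Derived *clone() const override { return nullptr; } // covariant return
    };
    // When b->clone() is devirtualized to Derived::clone(), the analyzer holds
    // a Derived* value but the call expression expects Base*, so the value
    // needs the derived-to-base adjustment (evalDerivedToBase above).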
161
162void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext& BC,
163 ExplodedNode *Pred,
164 ExplodedNodeSet &Dst) {
165 // Find the last statement in the function and the corresponding basic block.
166 const Stmt *LastSt = nullptr;
167 const CFGBlock *Blk = nullptr;
168 std::tie(LastSt, Blk) = getLastStmt(Pred);
169 if (!Blk || !LastSt) {
170 Dst.Add(Pred);
171 return;
172 }
173
174 // Here, we destroy the current location context. We use the current
175 // function's entire body as a diagnostic statement, with which the program
176 // point will be associated. However, we only want to use LastStmt as a
177 // reference for what to clean up if it's a ReturnStmt; otherwise, everything
178 // is dead.
179 SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
180 const LocationContext *LCtx = Pred->getLocationContext();
181 removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
182 LCtx->getAnalysisDeclContext()->getBody(),
183 ProgramPoint::PostStmtPurgeDeadSymbolsKind);
184}
185
186static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
187 const StackFrameContext *calleeCtx) {
188 const Decl *RuntimeCallee = calleeCtx->getDecl();
189 const Decl *StaticDecl = Call->getDecl();
190 assert(RuntimeCallee);
191 if (!StaticDecl)
192 return true;
193 return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
194}
195
196/// Returns true if the CXXConstructExpr \p E was intended to construct a
197/// prvalue for the region in \p V.
198///
199/// Note that we can't just test for rvalue vs. glvalue because
200/// CXXConstructExprs embedded in DeclStmts and initializers are considered
201/// rvalues by the AST, and the analyzer would like to treat them as lvalues.
202static bool isTemporaryPRValue(const CXXConstructExpr *E, SVal V) {
203 if (E->isGLValue())
204 return false;
205
206 const MemRegion *MR = V.getAsRegion();
207 if (!MR)
208 return false;
209
210 return isa<CXXTempObjectRegion>(MR);
211}
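As a concrete illustration of the rvalue/glvalue subtlety noted above (hypothetical type S; the comments describe the usual modeling, not a guarantee):

    struct S {};
    void f() {
      S s = S();        // the inner CXXConstructExpr is a prvalue in the AST,
                        // but typically constructs directly into the region of 's'
      const S &r = S(); // this temporary is typically backed by a
                        // CXXTempObjectRegion, which isTemporaryPRValue detects
      (void)s; (void)r;
    }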
212
213/// The call exit is simulated with a sequence of nodes, which occur between
214/// CallExitBegin and CallExitEnd. The following operations occur between the
215/// two program points:
216/// 1. CallExitBegin (triggers the start of call exit sequence)
217/// 2. Bind the return value
218/// 3. Run remove-dead-bindings to clean up the dead symbols from the callee.
219/// 4. CallExitEnd (switch to the caller context)
220/// 5. PostStmt<CallExpr>
221void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
222 // Step 1: CEBNode was generated before the call.
223 PrettyStackTraceLocationContext CrashInfo(CEBNode->getLocationContext());
224 const StackFrameContext *calleeCtx =
225 CEBNode->getLocationContext()->getCurrentStackFrame();
226
227 // The parent context might not be a stack frame, so make sure we
228 // look up the first enclosing stack frame.
229 const StackFrameContext *callerCtx =
230 calleeCtx->getParent()->getCurrentStackFrame();
231
232 const Stmt *CE = calleeCtx->getCallSite();
233 ProgramStateRef state = CEBNode->getState();
234 // Find the last statement in the function and the corresponding basic block.
235 const Stmt *LastSt = nullptr;
236 const CFGBlock *Blk = nullptr;
237 std::tie(LastSt, Blk) = getLastStmt(CEBNode);
238
239 // Generate a CallEvent /before/ cleaning the state, so that we can get the
240 // correct value for 'this' (if necessary).
241 CallEventManager &CEMgr = getStateManager().getCallEventManager();
242 CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);
243
244 // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.
245
246 // If the callee returns an expression, bind its value to CallExpr.
247 if (CE) {
248 if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
249 const LocationContext *LCtx = CEBNode->getLocationContext();
250 SVal V = state->getSVal(RS, LCtx);
251
252 // Ensure that the return type matches the type of the returned Expr.
253 if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
254 QualType ReturnedTy =
255 CallEvent::getDeclaredResultType(calleeCtx->getDecl());
256 if (!ReturnedTy.isNull()) {
257 if (const Expr *Ex = dyn_cast<Expr>(CE)) {
258 V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
259 getStoreManager());
260 }
261 }
262 }
263
264 state = state->BindExpr(CE, callerCtx, V);
265 }
266
267 // Bind the constructed object value to CXXConstructExpr.
268 if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
269 loc::MemRegionVal This =
270 svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
271 SVal ThisV = state->getSVal(This);
272
273 // If the constructed object is a temporary prvalue, get its bindings.
274 if (isTemporaryPRValue(CCE, ThisV))
275 ThisV = state->getSVal(ThisV.castAs<Loc>());
276
277 state = state->BindExpr(CCE, callerCtx, ThisV);
278 }
279
280 if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
281 // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
282 // while to reach the actual CXXNewExpr element from here, so keep the
283 // region for later use.
284 // Additionally cast the return value of the inlined operator new
285 // (which is of type 'void *') to the correct object type.
286 SVal AllocV = state->getSVal(CNE, callerCtx);
287 AllocV = svalBuilder.evalCast(
288 AllocV, CNE->getType(),
289 getContext().getPointerType(getContext().VoidTy));
290
291 state =
292 setCXXNewAllocatorValue(state, CNE, calleeCtx->getParent(), AllocV);
293 }
294 }
295
296 // Step 3: BindedRetNode -> CleanedNodes
297 // If we can find a statement and a block in the inlined function, run remove
298 // dead bindings before returning from the call. This is important to ensure
299 // that we report issues such as leaks in the stack contexts in which
300 // they occurred.
301 ExplodedNodeSet CleanedNodes;
302 if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
303 static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
304 PostStmt Loc(LastSt, calleeCtx, &retValBind);
305 bool isNew;
306 ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
307 BindedRetNode->addPredecessor(CEBNode, G);
308 if (!isNew)
309 return;
310
311 NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
312 currBldrCtx = &Ctx;
313 // Here, we call the Symbol Reaper with a null statement and the callee's
314 // location context, telling it to clean up everything in the callee's
315 // context (and its children). We use the callee's function body as a
316 // diagnostic statement, with which the program point will be associated.
317 removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
318 calleeCtx->getAnalysisDeclContext()->getBody(),
319 ProgramPoint::PostStmtPurgeDeadSymbolsKind);
320 currBldrCtx = nullptr;
321 } else {
322 CleanedNodes.Add(CEBNode);
323 }
324
325 for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
326 E = CleanedNodes.end(); I != E; ++I) {
327
328 // Step 4: Generate the CallExit and leave the callee's context.
329 // CleanedNodes -> CEENode
330 CallExitEnd Loc(calleeCtx, callerCtx);
331 bool isNew;
332 ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();
333
334 ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
335 CEENode->addPredecessor(*I, G);
336 if (!isNew)
337 return;
338
339 // Step 5: Perform the post-condition check of the CallExpr and enqueue the
340 // result onto the work list.
341 // CEENode -> Dst -> WorkList
342 NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
343 SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
344 &Ctx);
345 SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());
346
347 CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);
348
349 ExplodedNodeSet DstPostCall;
350 if (const CXXNewExpr *CNE = dyn_cast_or_null<CXXNewExpr>(CE)) {
351 ExplodedNodeSet DstPostPostCallCallback;
352 getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
353 CEENode, *UpdatedCall, *this,
354 /*WasInlined=*/true);
355 for (auto I : DstPostPostCallCallback) {
356 getCheckerManager().runCheckersForNewAllocator(
357 CNE, getCXXNewAllocatorValue(I->getState(), CNE,
358 calleeCtx->getParent()),
359 DstPostCall, I, *this,
360 /*WasInlined=*/true);
361 }
362 } else {
363 getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
364 *UpdatedCall, *this,
365 /*WasInlined=*/true);
366 }
367 ExplodedNodeSet Dst;
368 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
369 getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
370 *this,
371 /*WasInlined=*/true);
372 } else if (CE &&
373 !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
374 AMgr.getAnalyzerOptions().mayInlineCXXAllocator())) {
375 getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
376 *this, /*WasInlined=*/true);
377 } else {
378 Dst.insert(DstPostCall);
379 }
380
381 // Enqueue the next element in the block.
382 for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
383 PSI != PSE; ++PSI) {
384 Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
385 calleeCtx->getIndex()+1);
386 }
387 }
388}
389
390void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
391 bool &IsRecursive, unsigned &StackDepth) {
392 IsRecursive = false;
393 StackDepth = 0;
394
395 while (LCtx) {
396 if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
397 const Decl *DI = SFC->getDecl();
398
399 // Mark recursive (and mutually recursive) functions and always count
400 // them when measuring the stack depth.
401 if (DI == D) {
402 IsRecursive = true;
403 ++StackDepth;
404 LCtx = LCtx->getParent();
405 continue;
406 }
407
408 // Do not count the small functions when determining the stack depth.
409 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
410 const CFG *CalleeCFG = CalleeADC->getCFG();
411 if (CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
412 ++StackDepth;
413 }
414 LCtx = LCtx->getParent();
415 }
416}
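For example (hypothetical function), a self-recursive call chain sets IsRecursive and counts every frame toward StackDepth, regardless of the function's size:

    unsigned fact(unsigned n) {
      // Each nested frame satisfies DI == D, so IsRecursive becomes true and
      // every recursive frame increments StackDepth.
      return n <= 1 ? 1 : n * fact(n - 1);
    }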
417
418// The GDM component containing the dynamic dispatch bifurcation info. When
419// the exact type of the receiver is not known, we want to explore both paths -
420// one on which we do inline it and the other one on which we don't. This is
421// done to ensure we do not drop coverage.
422// This is the map from the receiver region to a bool, specifying whether we
423// consider this region's type information precise along the given path.
424namespace {
425 enum DynamicDispatchMode {
426 DynamicDispatchModeInlined = 1,
427 DynamicDispatchModeConservative
428 };
429} // end anonymous namespace
430
431REGISTER_TRAIT_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
432                                 CLANG_ENTO_PROGRAMSTATE_MAP(const MemRegion *,
433                                                             unsigned))
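A sketch of the situation this trait tracks, with a hypothetical class hierarchy: when the receiver's dynamic type is imprecise, the same call site can be explored once inlined and once conservatively, keyed by the receiver's region.

    struct Shape {
      virtual double area() const { return 0.0; }
      virtual ~Shape() {}
    };
    struct Circle : Shape {
      double area() const override { return 3.14159; }
    };

    double total(const Shape *s) {
      // The analyzer only knows *s is Shape or a subclass. Under
      // IPAK_DynamicDispatchBifurcate it splits the path: one branch inlines
      // the best-guess definition, the other evaluates the call
      // conservatively, recording the choice for s's region in the map above.
      return s->area();
    }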
434
435bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
436 NodeBuilder &Bldr, ExplodedNode *Pred,
437 ProgramStateRef State) {
438 assert(D);
439
440 const LocationContext *CurLC = Pred->getLocationContext();
441 const StackFrameContext *CallerSFC = CurLC->getCurrentStackFrame();
442 const LocationContext *ParentOfCallee = CallerSFC;
443 if (Call.getKind() == CE_Block &&
444 !cast<BlockCall>(Call).isConversionFromLambda()) {
445 const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
446 assert(BR && "If we have the block definition we should have its region");
447 AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
448 ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
449 cast<BlockDecl>(D),
450 BR);
451 }
452
453 // This may be NULL, but that's fine.
454 const Expr *CallE = Call.getOriginExpr();
455
456 // Construct a new stack frame for the callee.
457 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
458 const StackFrameContext *CalleeSFC =
459 CalleeADC->getStackFrame(ParentOfCallee, CallE,
460 currBldrCtx->getBlock(),
461 currStmtIdx);
462
463 CallEnter Loc(CallE, CalleeSFC, CurLC);
464
465 // Construct a new state which contains the mapping from actual to
466 // formal arguments.
467 State = State->enterStackFrame(Call, CalleeSFC);
468
469 bool isNew;
470 if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
471 N->addPredecessor(Pred, G);
472 if (isNew)
473 Engine.getWorkList()->enqueue(N);
474 }
475
476 // If we decided to inline the call, the successor has been manually
477 // added onto the work list so remove it from the node builder.
478 Bldr.takeNodes(Pred);
479
480 NumInlinedCalls++;
481 Engine.FunctionSummaries->bumpNumTimesInlined(D);
482
483 // Mark the decl as visited.
484 if (VisitedCallees)
485 VisitedCallees->insert(D);
486
487 return true;
488}
489
490static ProgramStateRef getInlineFailedState(ProgramStateRef State,
491 const Stmt *CallE) {
492 const void *ReplayState = State->get<ReplayWithoutInlining>();
493 if (!ReplayState)
494 return nullptr;
495
496 assert(ReplayState == CallE && "Backtracked to the wrong call.");
497 (void)CallE;
498
499 return State->remove<ReplayWithoutInlining>();
500}
501
502void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
503 ExplodedNodeSet &dst) {
504 // Perform the previsit of the CallExpr.
505 ExplodedNodeSet dstPreVisit;
506 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
507
508 // Get the call in its initial state. We use this as a template to perform
509 // all the checks.
510 CallEventManager &CEMgr = getStateManager().getCallEventManager();
511 CallEventRef<> CallTemplate
512 = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());
513
514 // Evaluate the function call. We try each of the checkers
515 // to see if they can evaluate the function call.
516 ExplodedNodeSet dstCallEvaluated;
517 for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
518 I != E; ++I) {
519 evalCall(dstCallEvaluated, *I, *CallTemplate);
520 }
521
522 // Finally, perform the post-condition check of the CallExpr and store
523 // the created nodes in 'Dst'.
524 // Note that if the call was inlined, dstCallEvaluated will be empty.
525 // The post-CallExpr check will occur in processCallExit.
526 getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
527 *this);
528}
529
530void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
531 const CallEvent &Call) {
532 // WARNING: At this time, the state attached to 'Call' may be older than the
533 // state in 'Pred'. This is a minor optimization since CheckerManager will
534 // use an updated CallEvent instance when calling checkers, but if 'Call' is
535 // ever used directly in this function all callers should be updated to pass
536 // the most recent state. (It is probably not worth doing the work here since
537 // for some callers this will not be necessary.)
538
539 // Run any pre-call checks using the generic call interface.
540 ExplodedNodeSet dstPreVisit;
541 getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred, Call, *this);
542
543 // Actually evaluate the function call. We try each of the checkers
544 // to see if they can evaluate the function call, and get a callback at
545 // defaultEvalCall if all of them fail.
546 ExplodedNodeSet dstCallEvaluated;
547 getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
548 Call, *this);
549
550 // Finally, run any post-call checks.
551 getCheckerManager().runCheckersForPostCall(Dst, dstCallEvaluated,
552 Call, *this);
553}
554
555ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
556 const LocationContext *LCtx,
557 ProgramStateRef State) {
558 const Expr *E = Call.getOriginExpr();
559 if (!E)
560 return State;
561
562 // Some method families have known return values.
563 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
564 switch (Msg->getMethodFamily()) {
565 default:
566 break;
567 case OMF_autorelease:
568 case OMF_retain:
569 case OMF_self: {
570 // These methods return their receivers.
571 return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
572 }
573 }
574 } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
575 SVal ThisV = C->getCXXThisVal();
576
577 // If the constructed object is a temporary prvalue, get its bindings.
578 if (isTemporaryPRValue(cast<CXXConstructExpr>(E), ThisV))
579 ThisV = State->getSVal(ThisV.castAs<Loc>());
580
581 return State->BindExpr(E, LCtx, ThisV);
582 }
583
584 // Conjure a symbol if the return value is unknown.
585 QualType ResultTy = Call.getResultType();
586 SValBuilder &SVB = getSValBuilder();
587 unsigned Count = currBldrCtx->blockCount();
588
589 // See if we need to conjure a heap pointer instead of
590 // a regular unknown pointer.
591 bool IsHeapPointer = false;
592 if (const auto *CNE = dyn_cast<CXXNewExpr>(E))
593 if (CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
594 // FIXME: Delegate this to evalCall in MallocChecker?
595 IsHeapPointer = true;
596 }
597
598 SVal R = IsHeapPointer
599 ? SVB.getConjuredHeapSymbolVal(E, LCtx, Count)
600 : SVB.conjureSymbolVal(nullptr, E, LCtx, ResultTy, Count);
601 return State->BindExpr(E, LCtx, R);
602}
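To illustrate the conjured-symbol fallback (hypothetical caller code): when nothing models a call, its result becomes a fresh symbolic value that later checks can constrain.

    int getValue();           // body unknown to the analyzer
    void use() {
      int v = getValue();     // 'v' is bound to a conjured symbol of type int
      if (v > 0) {            // the path condition constrains that symbol
        // ...
      }
    }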
603
604// Conservatively evaluate call by invalidating regions and binding
605// a conjured return value.
606void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
607 ExplodedNode *Pred,
608 ProgramStateRef State) {
609 State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
610 State = bindReturnValue(Call, Pred->getLocationContext(), State);
611
612 // And make the result node.
613 Bldr.generateNode(Call.getProgramPoint(), State, Pred);
614}
615
616ExprEngine::CallInlinePolicy
617ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
618 AnalyzerOptions &Opts,
619 const ExprEngine::EvalCallOptions &CallOpts) {
620 const LocationContext *CurLC = Pred->getLocationContext();
621 const StackFrameContext *CallerSFC = CurLC->getCurrentStackFrame();
622 switch (Call.getKind()) {
623 case CE_Function:
624 case CE_Block:
625 break;
626 case CE_CXXMember:
627 case CE_CXXMemberOperator:
628 if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
629 return CIP_DisallowedAlways;
630 break;
631 case CE_CXXConstructor: {
632 if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
633 return CIP_DisallowedAlways;
634
635 const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);
636
637 const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
638
639 auto CCE = getCurrentCFGElement().getAs<CFGConstructor>();
640 const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
641 : nullptr;
642
643 if (CC && isa<NewAllocatedObjectConstructionContext>(CC) &&
644 !Opts.mayInlineCXXAllocator())
645 return CIP_DisallowedOnce;
646
647 // FIXME: We don't handle constructors or destructors for arrays properly.
648 // Even once we do, we still need to be careful about implicitly-generated
649 // initializers for array fields in default move/copy constructors.
650 // We still allow construction into ElementRegion targets when they don't
651 // represent array elements.
652 if (CallOpts.IsArrayCtorOrDtor)
653 return CIP_DisallowedOnce;
654
655 // Inlining constructors requires including initializers in the CFG.
656 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
657 assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
658 (void)ADC;
659
660 // If the destructor is trivial, it's always safe to inline the constructor.
661 if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
662 break;
663
664 // For other types, only inline constructors if destructor inlining is
665 // also enabled.
666 if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
667 return CIP_DisallowedAlways;
668
669 if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete) {
670 // If we don't handle temporary destructors, we shouldn't inline
671 // their constructors.
672 if (CallOpts.IsTemporaryCtorOrDtor &&
673 !Opts.includeTemporaryDtorsInCFG())
674 return CIP_DisallowedOnce;
675
676 // If we did not find the correct this-region, it would be pointless
677 // to inline the constructor. Instead we will simply invalidate
678 // the fake temporary target.
679 if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
680 return CIP_DisallowedOnce;
681
682 // If the temporary is lifetime-extended by binding a smaller object
683 // within it to a reference, automatic destructors don't work properly.
684 if (CallOpts.IsTemporaryLifetimeExtendedViaSubobject)
685 return CIP_DisallowedOnce;
686 }
687
688 break;
689 }
690 case CE_CXXDestructor: {
691 if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
692 return CIP_DisallowedAlways;
693
694 // Inlining destructors requires building the CFG correctly.
695 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
696 assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
697 (void)ADC;
698
699 // FIXME: We don't handle constructors or destructors for arrays properly.
700 if (CallOpts.IsArrayCtorOrDtor)
701 return CIP_DisallowedOnce;
702
703 // Allow disabling temporary destructor inlining with a separate option.
704 if (CallOpts.IsTemporaryCtorOrDtor && !Opts.mayInlineCXXTemporaryDtors())
705 return CIP_DisallowedOnce;
706
707 // If we did not find the correct this-region, it would be pointless
708 // to inline the destructor. Instead we will simply invalidate
709 // the fake temporary target.
710 if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
711 return CIP_DisallowedOnce;
712 break;
713 }
714 case CE_CXXAllocator:
715 if (Opts.mayInlineCXXAllocator())
716 break;
717 // Do not inline allocators until we model deallocators.
718 // This is unfortunate, but basically necessary for smart pointers and such.
719 return CIP_DisallowedAlways;
720 case CE_ObjCMessage:
721 if (!Opts.mayInlineObjCMethod())
722 return CIP_DisallowedAlways;
723 if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
724 Opts.getIPAMode() == IPAK_DynamicDispatchBifurcate))
725 return CIP_DisallowedAlways;
726 break;
727 }
728
729 return CIP_Allowed;
730}
731
732/// Returns true if the given C++ class contains a member with the given name.
733static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
734 StringRef Name) {
735 const IdentifierInfo &II = Ctx.Idents.get(Name);
736 DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
737 if (!RD->lookup(DeclName).empty())
738 return true;
739
740 CXXBasePaths Paths(false, false, false);
741 if (RD->lookupInBases(
742 [DeclName](const CXXBaseSpecifier *Specifier, CXXBasePath &Path) {
743 return CXXRecordDecl::FindOrdinaryMember(Specifier, Path, DeclName);
744 },
745 Paths))
746 return true;
747
748 return false;
749}
750
751/// Returns true if the given C++ class is a container or iterator.
752///
753/// Our heuristic for this is whether it contains a method named 'begin()' or a
754/// nested type named 'iterator' or 'iterator_category'.
755static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
756 return hasMember(Ctx, RD, "begin") ||
757 hasMember(Ctx, RD, "iterator") ||
758 hasMember(Ctx, RD, "iterator_category");
759}
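For instance (hypothetical types), the heuristic would classify each of the following as a container or iterator:

    struct A { int *begin(); };                // has a member named 'begin'
    struct B { using iterator = int *; };      // has a nested name 'iterator'
    struct C { struct iterator_category {}; }; // has 'iterator_category'
    // isContainerClass(Ctx, RD) returns true for A, B, and C.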
760
761/// Returns true if the given function refers to a method of a C++ container
762/// or iterator.
763///
764/// We generally do a poor job modeling most containers right now, and might
765/// prefer not to inline their methods.
766static bool isContainerMethod(const ASTContext &Ctx,
767 const FunctionDecl *FD) {
768 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
769 return isContainerClass(Ctx, MD->getParent());
770 return false;
771}
772
773/// Returns true if the given function is the destructor of a class named
774/// "shared_ptr".
775static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
776 const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
777 if (!Dtor)
778 return false;
779
780 const CXXRecordDecl *RD = Dtor->getParent();
781 if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
782 if (II->isStr("shared_ptr"))
783 return true;
784
785 return false;
786}
787
788/// Returns true if the function in \p CalleeADC may be inlined in general.
789///
790/// This checks static properties of the function, such as its signature and
791/// CFG, to determine whether the analyzer should ever consider inlining it,
792/// in any context.
793static bool mayInlineDecl(AnalysisDeclContext *CalleeADC,
794 AnalyzerOptions &Opts) {
795 // FIXME: Do not inline variadic calls.
796 if (CallEvent::isVariadic(CalleeADC->getDecl()))
797 return false;
798
799 // Check certain C++-related inlining policies.
800 ASTContext &Ctx = CalleeADC->getASTContext();
801 if (Ctx.getLangOpts().CPlusPlus) {
802 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
803 // Conditionally control the inlining of template functions.
804 if (!Opts.mayInlineTemplateFunctions())
805 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
806 return false;
807
808 // Conditionally control the inlining of C++ standard library functions.
809 if (!Opts.mayInlineCXXStandardLibrary())
810 if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
811 if (AnalysisDeclContext::isInStdNamespace(FD))
812 return false;
813
814 // Conditionally control the inlining of methods on objects that look
815 // like C++ containers.
816 if (!Opts.mayInlineCXXContainerMethods())
817 if (!Ctx.getSourceManager().isInMainFile(FD->getLocation()))
818 if (isContainerMethod(Ctx, FD))
819 return false;
820
821 // Conditionally control the inlining of the destructor of C++ shared_ptr.
822 // We don't currently do a good job modeling shared_ptr because we can't
823 // see the reference count, so treating it as opaque is probably the best
824 // idea.
825 if (!Opts.mayInlineCXXSharedPtrDtor())
826 if (isCXXSharedPtrDtor(FD))
827 return false;
828 }
829 }
830
831 // It is possible that the CFG cannot be constructed.
832 // Be safe, and check if the CalleeCFG is valid.
833 const CFG *CalleeCFG = CalleeADC->getCFG();
834 if (!CalleeCFG)
835 return false;
836
837 // Do not inline large functions.
838 if (CalleeCFG->getNumBlockIDs() > Opts.getMaxInlinableSize())
839 return false;
840
841 // It is possible that the live variables analysis cannot be
842 // run. If so, bail out.
843 if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
844 return false;
845
846 return true;
847}
848
849bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
850 const ExplodedNode *Pred,
851 const EvalCallOptions &CallOpts) {
852 if (!D)
853 return false;
854
855 AnalysisManager &AMgr = getAnalysisManager();
856 AnalyzerOptions &Opts = AMgr.options;
857 AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
858 AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);
859
860 // The auto-synthesized bodies are essential to inline as they are
861 // usually small and commonly used. Note: we should do this check early on to
862 // ensure we always inline these calls.
863 if (CalleeADC->isBodyAutosynthesized())
864 return true;
865
866 if (!AMgr.shouldInlineCall())
867 return false;
868
869 // Check if this function has been marked as non-inlinable.
870 Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
871 if (MayInline.hasValue()) {
872 if (!MayInline.getValue())
873 return false;
874
875 } else {
876 // We haven't actually checked the static properties of this function yet.
877 // Do that now, and record our decision in the function summaries.
878 if (mayInlineDecl(CalleeADC, Opts)) {
879 Engine.FunctionSummaries->markMayInline(D);
880 } else {
881 Engine.FunctionSummaries->markShouldNotInline(D);
882 return false;
883 }
884 }
885
886 // Check if we should inline a call based on its kind.
887 // FIXME: this checks both static and dynamic properties of the call, which
888 // means we're redoing a bit of work that could be cached in the function
889 // summary.
890 CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
891 if (CIP != CIP_Allowed) {
892 if (CIP == CIP_DisallowedAlways) {
893 assert(!MayInline.hasValue() || MayInline.getValue());
894 Engine.FunctionSummaries->markShouldNotInline(D);
895 }
896 return false;
897 }
898
899 const CFG *CalleeCFG = CalleeADC->getCFG();
900
901 // Do not inline if recursive or we've reached max stack frame count.
902 bool IsRecursive = false;
903 unsigned StackDepth = 0;
904 examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
905 if ((StackDepth >= Opts.InlineMaxStackDepth) &&
906 ((CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize())
907 || IsRecursive))
908 return false;
909
910 // Do not inline large functions too many times.
911 if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
912 Opts.getMaxTimesInlineLarge()) &&
913 CalleeCFG->getNumBlockIDs() >=
914 Opts.getMinCFGSizeTreatFunctionsAsLarge()) {
915 NumReachedInlineCountMax++;
916 return false;
917 }
918
919 if (HowToInline == Inline_Minimal &&
920 (CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize()
921 || IsRecursive))
922 return false;
923
924 return true;
925}
926
927static bool isTrivialObjectAssignment(const CallEvent &Call) {
928 const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
929 if (!ICall)
930 return false;
931
932 const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
933 if (!MD)
934 return false;
935 if (!(MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()))
936 return false;
937
938 return MD->isTrivial();
939}
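For example (hypothetical structs), this check distinguishes trivial from user-provided assignment operators:

    struct POD { int x; double y; };
    // POD's implicit operator= is trivial: isTrivialObjectAssignment -> true,
    // so defaultEvalCall below performs a simple copy instead of inlining.

    struct Tracked {
      Tracked &operator=(const Tracked &); // user-provided, not trivial:
                                           // evaluated as an ordinary call
    };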
940
941void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
942 const CallEvent &CallTemplate,
943 const EvalCallOptions &CallOpts) {
944 // Make sure we have the most recent state attached to the call.
945 ProgramStateRef State = Pred->getState();
946 CallEventRef<> Call = CallTemplate.cloneWithState(State);
  [1] Null pointer value stored to field 'Obj'
  [2] Calling copy constructor for 'CallEventRef'
  [6] Returning from copy constructor for 'CallEventRef'
947
948 // Special-case trivial assignment operators.
949 if (isTrivialObjectAssignment(*Call)) {
  [7] Calling 'IntrusiveRefCntPtr::operator*'
950 performTrivialCopy(Bldr, Pred, *Call);
951 return;
952 }
953
954 // Try to inline the call.
955 // The origin expression here is just used as a kind of checksum;
956 // this should still be safe even for CallEvents that don't come from exprs.
957 const Expr *E = Call->getOriginExpr();
958
959 ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
960 if (InlinedFailedState) {
961 // If we already tried once and failed, make sure we don't retry later.
962 State = InlinedFailedState;
963 } else {
964 RuntimeDefinition RD = Call->getRuntimeDefinition();
965 const Decl *D = RD.getDecl();
966 if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
967 if (RD.mayHaveOtherDefinitions()) {
968 AnalyzerOptions &Options = getAnalysisManager().options;
969
970 // Explore with and without inlining the call.
971 if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
972 BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
973 return;
974 }
975
976 // Don't inline if we're not in any dynamic dispatch mode.
977 if (Options.getIPAMode() != IPAK_DynamicDispatch) {
978 conservativeEvalCall(*Call, Bldr, Pred, State);
979 return;
980 }
981 }
982
983 // We are not bifurcating and we do have a Decl, so just inline.
984 if (inlineCall(*Call, D, Bldr, Pred, State))
985 return;
986 }
987 }
988
989 // If we can't inline it, handle the return value and invalidate the regions.
990 conservativeEvalCall(*Call, Bldr, Pred, State);
991}
992
993void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
994 const CallEvent &Call, const Decl *D,
995 NodeBuilder &Bldr, ExplodedNode *Pred) {
996 assert(BifurReg);
997 BifurReg = BifurReg->StripCasts();
998
999 // Check if we've performed the split already - note, we only want
1000 // to split the path once per memory region.
1001 ProgramStateRef State = Pred->getState();
1002 const unsigned *BState =
1003 State->get<DynamicDispatchBifurcationMap>(BifurReg);
1004 if (BState) {
1005 // If we are on "inline path", keep inlining if possible.
1006 if (*BState == DynamicDispatchModeInlined)
1007 if (inlineCall(Call, D, Bldr, Pred, State))
1008 return;
1009 // If inline failed, or we are on the path where we assume we
1010 // don't have enough info about the receiver to inline, conjure the
1011 // return value and invalidate the regions.
1012 conservativeEvalCall(Call, Bldr, Pred, State);
1013 return;
1014 }
1015
1016 // If we got here, this is the first time we process a message to this
1017 // region, so split the path.
1018 ProgramStateRef IState =
1019 State->set<DynamicDispatchBifurcationMap>(BifurReg,
1020 DynamicDispatchModeInlined);
1021 inlineCall(Call, D, Bldr, Pred, IState);
1022
1023 ProgramStateRef NoIState =
1024 State->set<DynamicDispatchBifurcationMap>(BifurReg,
1025 DynamicDispatchModeConservative);
1026 conservativeEvalCall(Call, Bldr, Pred, NoIState);
1027
1028 NumOfDynamicDispatchPathSplits++;
1029}
1030
1031void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
1032 ExplodedNodeSet &Dst) {
1033 ExplodedNodeSet dstPreVisit;
1034 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
1035
1036 StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);
1037
1038 if (RS->getRetValue()) {
1039 for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
1040 ei = dstPreVisit.end(); it != ei; ++it) {
1041 B.generateNode(RS, *it, (*it)->getState());
1042 }
1043 }
1044}

/build/llvm-toolchain-snapshot-7~svn326551/tools/clang/include/clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h

1//===- CallEvent.h - Wrapper for all function and method calls --*- C++ -*-===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10/// \file This file defines CallEvent and its subclasses, which represent path-
11/// sensitive instances of different kinds of function and method calls
12/// (C, C++, and Objective-C).
13//
14//===----------------------------------------------------------------------===//
15
16#ifndef LLVM_CLANG_STATICANALYZER_CORE_PATHSENSITIVE_CALLEVENT_H
17#define LLVM_CLANG_STATICANALYZER_CORE_PATHSENSITIVE_CALLEVENT_H
18
19#include "clang/AST/Decl.h"
20#include "clang/AST/DeclBase.h"
21#include "clang/AST/DeclCXX.h"
22#include "clang/AST/DeclObjC.h"
23#include "clang/AST/Expr.h"
24#include "clang/AST/ExprCXX.h"
25#include "clang/AST/ExprObjC.h"
26#include "clang/AST/Stmt.h"
27#include "clang/AST/Type.h"
28#include "clang/Basic/IdentifierTable.h"
29#include "clang/Basic/LLVM.h"
30#include "clang/Basic/SourceLocation.h"
31#include "clang/Basic/SourceManager.h"
32#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
33#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState_Fwd.h"
34#include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h"
35#include "llvm/ADT/ArrayRef.h"
36#include "llvm/ADT/IntrusiveRefCntPtr.h"
37#include "llvm/ADT/PointerIntPair.h"
38#include "llvm/ADT/PointerUnion.h"
39#include "llvm/ADT/STLExtras.h"
40#include "llvm/ADT/SmallVector.h"
41#include "llvm/ADT/StringRef.h"
42#include "llvm/Support/Allocator.h"
43#include "llvm/Support/Casting.h"
44#include "llvm/Support/ErrorHandling.h"
45#include <cassert>
46#include <limits>
47#include <utility>
48
49namespace clang {
50
51class LocationContext;
52class ProgramPoint;
53class ProgramPointTag;
54class StackFrameContext;
55
56namespace ento {
57
58enum CallEventKind {
59 CE_Function,
60 CE_CXXMember,
61 CE_CXXMemberOperator,
62 CE_CXXDestructor,
63 CE_BEG_CXX_INSTANCE_CALLS = CE_CXXMember,
64 CE_END_CXX_INSTANCE_CALLS = CE_CXXDestructor,
65 CE_CXXConstructor,
66 CE_CXXAllocator,
67 CE_BEG_FUNCTION_CALLS = CE_Function,
68 CE_END_FUNCTION_CALLS = CE_CXXAllocator,
69 CE_Block,
70 CE_ObjCMessage
71};
72
73class CallEvent;
74
75/// This class represents a description of a function call using the number of
76/// arguments and the name of the function.
77class CallDescription {
78 friend CallEvent;
79
80 mutable IdentifierInfo *II = nullptr;
81 mutable bool IsLookupDone = false;
82 StringRef FuncName;
83 unsigned RequiredArgs;
84
85public:
86 const static unsigned NoArgRequirement = std::numeric_limits<unsigned>::max();
87
88 /// \brief Constructs a CallDescription object.
89 ///
90 /// @param FuncName The name of the function that will be matched.
91 ///
92 /// @param RequiredArgs The number of arguments that is expected to match a
93 /// call. Omit this parameter to match every occurrence of a call with a
94 /// given name regardless of the number of arguments.
95 CallDescription(StringRef FuncName, unsigned RequiredArgs = NoArgRequirement)
96 : FuncName(FuncName), RequiredArgs(RequiredArgs) {}
97
98 /// \brief Get the name of the function that this object matches.
99 StringRef getFunctionName() const { return FuncName; }
100};
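Based on the interface above, a checker might match a specific function like this (a sketch; the checker callback context and its 'Call' variable are assumed, not shown):

    #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
    using namespace clang;
    using namespace ento;

    // Matches any call to a function named "malloc" with exactly one argument.
    static const CallDescription MallocFn("malloc", /*RequiredArgs=*/1);

    // Inside a checker callback:
    //   if (Call.isCalled(MallocFn))
    //     ... treat this call as an allocation ...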
101
102template<typename T = CallEvent>
103class CallEventRef : public IntrusiveRefCntPtr<const T> {
104public:
105 CallEventRef(const T *Call) : IntrusiveRefCntPtr<const T>(Call) {}
106 CallEventRef(const CallEventRef &Orig) : IntrusiveRefCntPtr<const T>(Orig) {}
  [3] Calling copy constructor for 'IntrusiveRefCntPtr'
  [5] Returning from copy constructor for 'IntrusiveRefCntPtr'
107
108 CallEventRef<T> cloneWithState(ProgramStateRef State) const {
109 return this->get()->template cloneWithState<T>(State);
110 }
111
112 // Allow implicit conversions to a superclass type, since CallEventRef
113 // behaves like a pointer-to-const.
114 template <typename SuperT>
115 operator CallEventRef<SuperT> () const {
116 return this->get();
117 }
118};
119
120/// \class RuntimeDefinition
121/// \brief Defines the runtime definition of the called function.
122///
123/// Encapsulates the information we have about which Decl will be used
124/// when the call is executed on the given path. When dealing with dynamic
125/// dispatch, the information is based on DynamicTypeInfo and might not be
126/// precise.
127class RuntimeDefinition {
128 /// The Declaration of the function which could be called at runtime.
129 /// NULL if not available.
130 const Decl *D = nullptr;
131
132 /// The region representing an object (ObjC/C++) on which the method is
133 /// called. With dynamic dispatch, the method definition depends on the
134 /// runtime type of this object. NULL when the DynamicTypeInfo is
135 /// precise.
136 const MemRegion *R = nullptr;
137
138public:
139 RuntimeDefinition() = default;
140 RuntimeDefinition(const Decl *InD): D(InD) {}
141 RuntimeDefinition(const Decl *InD, const MemRegion *InR): D(InD), R(InR) {}
142
143 const Decl *getDecl() { return D; }
144
145 /// \brief Check if the definition we have is precise.
146 /// If not, it is possible that the call dispatches to another definition at
147 /// execution time.
148 bool mayHaveOtherDefinitions() { return R != nullptr; }
149
150 /// When other definitions are possible, returns the region whose runtime type
151 /// determines the method definition.
152 const MemRegion *getDispatchRegion() { return R; }
153};
154
155/// \brief Represents an abstract call to a function or method along a
156/// particular path.
157///
158/// CallEvents are created through the factory methods of CallEventManager.
159///
160/// CallEvents should always be cheap to create and destroy. In order for
161/// CallEventManager to be able to re-use CallEvent-sized memory blocks,
162/// subclasses of CallEvent may not add any data members to the base class.
163/// Use the "Data" and "Location" fields instead.
164class CallEvent {
165public:
166 using Kind = CallEventKind;
167
168private:
169 ProgramStateRef State;
170 const LocationContext *LCtx;
171 llvm::PointerUnion<const Expr *, const Decl *> Origin;
172
173protected:
174 // This is user data for subclasses.
175 const void *Data;
176
177 // This is user data for subclasses.
178 // This should come right before RefCount, so that the two fields can be
179 // packed together on LP64 platforms.
180 SourceLocation Location;
181
182private:
183 template <typename T> friend struct llvm::IntrusiveRefCntPtrInfo;
184
185 mutable unsigned RefCount = 0;
186
187 void Retain() const { ++RefCount; }
188 void Release() const;
189
190protected:
191 friend class CallEventManager;
192
193 CallEvent(const Expr *E, ProgramStateRef state, const LocationContext *lctx)
194 : State(std::move(state)), LCtx(lctx), Origin(E) {}
195
196 CallEvent(const Decl *D, ProgramStateRef state, const LocationContext *lctx)
197 : State(std::move(state)), LCtx(lctx), Origin(D) {}
198
199 // DO NOT MAKE PUBLIC
200 CallEvent(const CallEvent &Original)
201 : State(Original.State), LCtx(Original.LCtx), Origin(Original.Origin),
202 Data(Original.Data), Location(Original.Location) {}
203
204 /// Copies this CallEvent, with vtable intact, into a new block of memory.
205 virtual void cloneTo(void *Dest) const = 0;
206
207 /// \brief Get the value of arbitrary expressions at this point in the path.
208 SVal getSVal(const Stmt *S) const {
209 return getState()->getSVal(S, getLocationContext());
210 }
211
212 using ValueList = SmallVectorImpl<SVal>;
213
214 /// \brief Used to specify non-argument regions that will be invalidated as a
215 /// result of this call.
216 virtual void getExtraInvalidatedValues(ValueList &Values,
217 RegionAndSymbolInvalidationTraits *ETraits) const {}
218
219public:
220 CallEvent &operator=(const CallEvent &) = delete;
221 virtual ~CallEvent() = default;
222
223 /// \brief Returns the kind of call this is.
224 virtual Kind getKind() const = 0;
225
226 /// \brief Returns the declaration of the function or method that will be
227 /// called. May be null.
228 virtual const Decl *getDecl() const {
229 return Origin.dyn_cast<const Decl *>();
230 }
231
232 /// \brief The state in which the call is being evaluated.
233 const ProgramStateRef &getState() const {
234 return State;
235 }
236
237 /// \brief The context in which the call is being evaluated.
238 const LocationContext *getLocationContext() const {
239 return LCtx;
240 }
241
242 /// \brief Returns the definition of the function or method that will be
243 /// called.
244 virtual RuntimeDefinition getRuntimeDefinition() const = 0;
245
246 /// \brief Returns the expression whose value will be the result of this call.
247 /// May be null.
248 const Expr *getOriginExpr() const {
249 return Origin.dyn_cast<const Expr *>();
250 }
251
252 /// \brief Returns the number of arguments (explicit and implicit).
253 ///
254 /// Note that this may be greater than the number of parameters in the
255 /// callee's declaration, and that it may include arguments not written in
256 /// the source.
257 virtual unsigned getNumArgs() const = 0;
258
259 /// \brief Returns true if the callee is known to be from a system header.
260 bool isInSystemHeader() const {
261 const Decl *D = getDecl();
262 if (!D)
263 return false;
264
265 SourceLocation Loc = D->getLocation();
266 if (Loc.isValid()) {
267 const SourceManager &SM =
268 getState()->getStateManager().getContext().getSourceManager();
269 return SM.isInSystemHeader(D->getLocation());
270 }
271
272 // Special case for implicitly-declared global operator new/delete.
273 // These should be considered system functions.
274 if (const auto *FD = dyn_cast<FunctionDecl>(D))
275 return FD->isOverloadedOperator() && FD->isImplicit() && FD->isGlobal();
276
277 return false;
278 }
279
280 /// \brief Returns true if the CallEvent is a call to a function that matches
281 /// the CallDescription.
282 ///
283 /// Note that this function is not intended to be used to match Obj-C method
284 /// calls.
285 bool isCalled(const CallDescription &CD) const;
286
287 /// \brief Returns a source range for the entire call, suitable for
288 /// outputting in diagnostics.
289 virtual SourceRange getSourceRange() const {
290 return getOriginExpr()->getSourceRange();
291 }
292
293 /// \brief Returns the value of a given argument at the time of the call.
294 virtual SVal getArgSVal(unsigned Index) const;
295
296 /// \brief Returns the expression associated with a given argument.
297 /// May be null if this expression does not appear in the source.
298 virtual const Expr *getArgExpr(unsigned Index) const { return nullptr; }
299
300 /// \brief Returns the source range for errors associated with this argument.
301 ///
302 /// May be invalid if the argument is not written in the source.
303 virtual SourceRange getArgSourceRange(unsigned Index) const;
304
305 /// \brief Returns the result type, adjusted for references.
306 QualType getResultType() const;
307
308 /// \brief Returns the return value of the call.
309 ///
310 /// This should only be called if the CallEvent was created using a state in
311 /// which the return value has already been bound to the origin expression.
312 SVal getReturnValue() const;
313
314 /// \brief Returns true if the type of any of the non-null arguments satisfies
315 /// the condition.
316 bool hasNonNullArgumentsWithType(bool (*Condition)(QualType)) const;
317
318 /// \brief Returns true if any of the arguments appear to represent callbacks.
319 bool hasNonZeroCallbackArg() const;
320
321 /// \brief Returns true if any of the arguments is a pointer to non-const \c void.
322 bool hasVoidPointerToNonConstArg() const;
323
324 /// \brief Returns true if any of the arguments are known to escape to long-
325 /// term storage, even if this method will not modify them.
326 // NOTE: The exact semantics of this are still being defined!
327 // We don't really want a list of hardcoded exceptions in the long run,
328 // but we don't want duplicated lists of known APIs in the short term either.
329 virtual bool argumentsMayEscape() const {
330 return hasNonZeroCallbackArg();
331 }
332
333 /// \brief Returns true if the callee is an externally-visible function in the
334 /// top-level namespace, such as \c malloc.
335 ///
336 /// You can use this call to determine that a particular function really is
337 /// a library function and not, say, a C++ member function with the same name.
338 ///
339 /// If a name is provided, the function must additionally match the given
340 /// name.
341 ///
342 /// Note that this deliberately excludes C++ library functions in the \c std
343 /// namespace, but will include C library functions accessed through the
344 /// \c std namespace. This also does not check if the function is declared
345 /// as 'extern "C"', or if it uses C++ name mangling.
346 // FIXME: Add a helper for checking namespaces.
347 // FIXME: Move this down to AnyFunctionCall once checkers have more
348 // precise callbacks.
349 bool isGlobalCFunction(StringRef SpecificName = StringRef()) const;
350
351 /// \brief Returns the name of the callee, if its name is a simple identifier.
352 ///
353 /// Note that this will fail for Objective-C methods, blocks, and C++
354 /// overloaded operators. The former is named by a Selector rather than a
355 /// simple identifier, and the latter two do not have names.
356 // FIXME: Move this down to AnyFunctionCall once checkers have more
357 // precise callbacks.
358 const IdentifierInfo *getCalleeIdentifier() const {
359 const auto *ND = dyn_cast_or_null<NamedDecl>(getDecl());
360 if (!ND)
361 return nullptr;
362 return ND->getIdentifier();
363 }
364
365 /// \brief Returns an appropriate ProgramPoint for this call.
366 ProgramPoint getProgramPoint(bool IsPreVisit = false,
367 const ProgramPointTag *Tag = nullptr) const;
368
369 /// \brief Returns a new state with all argument regions invalidated.
370 ///
371 /// This accepts an alternate state in case some processing has already
372 /// occurred.
373 ProgramStateRef invalidateRegions(unsigned BlockCount,
374 ProgramStateRef Orig = nullptr) const;
375
376 using FrameBindingTy = std::pair<Loc, SVal>;
377 using BindingsTy = SmallVectorImpl<FrameBindingTy>;
378
379 /// Populates the given SmallVector with the bindings in the callee's stack
380 /// frame at the start of this call.
381 virtual void getInitialStackFrameContents(const StackFrameContext *CalleeCtx,
382 BindingsTy &Bindings) const = 0;
383
384 /// Returns a copy of this CallEvent, but using the given state.
385 template <typename T>
386 CallEventRef<T> cloneWithState(ProgramStateRef NewState) const;
387
388 /// Returns a copy of this CallEvent, but using the given state.
389 CallEventRef<> cloneWithState(ProgramStateRef NewState) const {
390 return cloneWithState<CallEvent>(NewState);
391 }
392
393 /// \brief Returns true if this statement is a function or method call
394 /// of some kind.
395 static bool isCallStmt(const Stmt *S);
396
397 /// \brief Returns the result type of a function or method declaration.
398 ///
399 /// This will return a null QualType if the result type cannot be determined.
400 static QualType getDeclaredResultType(const Decl *D);
401
402 /// \brief Returns true if the given decl is known to be variadic.
403 ///
404 /// \p D must not be null.
405 static bool isVariadic(const Decl *D);
406
407 // Iterator access to formal parameters and their types.
408private:
409 struct GetTypeFn {
410 QualType operator()(ParmVarDecl *PD) const { return PD->getType(); }
411 };
412
413public:
414 /// Return call's formal parameters.
415 ///
416 /// Remember that the number of formal parameters may not match the number
417 /// of arguments for all calls. However, the first parameter will always
418 /// correspond with the argument value returned by \c getArgSVal(0).
419 virtual ArrayRef<ParmVarDecl *> parameters() const = 0;
420
421 using param_type_iterator =
422 llvm::mapped_iterator<ArrayRef<ParmVarDecl *>::iterator, GetTypeFn>;
423
424 /// Returns an iterator over the types of the call's formal parameters.
425 ///
426 /// This uses the callee decl found by default name lookup rather than the
427 /// definition because it represents a public interface, and probably has
428 /// more annotations.
429 param_type_iterator param_type_begin() const {
430 return llvm::map_iterator(parameters().begin(), GetTypeFn());
431 }
432 /// \sa param_type_begin()
433 param_type_iterator param_type_end() const {
434 return llvm::map_iterator(parameters().end(), GetTypeFn());
435 }
436
437 // For debugging purposes only
438 void dump(raw_ostream &Out) const;
439 void dump() const;
440};
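
A minimal checker-side sketch of how the accessors above are typically consumed (the helper below is hypothetical; it assumes only the CallEvent API declared above plus llvm::errs()):

    #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
    #include "llvm/Support/raw_ostream.h"

    using namespace clang;
    using namespace ento;

    // Hypothetical helper: inspect a call along the current path.
    static void inspectCall(const CallEvent &Call) {
      // Match plain C library functions such as malloc().
      if (Call.isGlobalCFunction("malloc") && Call.getNumArgs() == 1) {
        SVal Size = Call.getArgSVal(0); // path-sensitive value of the size arg
        Size.dumpToStream(llvm::errs());
      }

      // Fall back to the callee's simple identifier, when it has one
      // (null for Obj-C methods, blocks, and overloaded operators).
      if (const IdentifierInfo *II = Call.getCalleeIdentifier())
        llvm::errs() << II->getName() << "\n";
    }
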
441
442/// \brief Represents a call to any sort of function that might have a
443/// FunctionDecl.
444class AnyFunctionCall : public CallEvent {
445protected:
446 AnyFunctionCall(const Expr *E, ProgramStateRef St,
447 const LocationContext *LCtx)
448 : CallEvent(E, St, LCtx) {}
449 AnyFunctionCall(const Decl *D, ProgramStateRef St,
450 const LocationContext *LCtx)
451 : CallEvent(D, St, LCtx) {}
452 AnyFunctionCall(const AnyFunctionCall &Other) = default;
453
454public:
455 // This function is overridden by subclasses, but they must return
456 // a FunctionDecl.
457 const FunctionDecl *getDecl() const override {
458 return cast<FunctionDecl>(CallEvent::getDecl());
459 }
460
461 RuntimeDefinition getRuntimeDefinition() const override;
462
463 bool argumentsMayEscape() const override;
464
465 void getInitialStackFrameContents(const StackFrameContext *CalleeCtx,
466 BindingsTy &Bindings) const override;
467
468 ArrayRef<ParmVarDecl *> parameters() const override;
469
470 static bool classof(const CallEvent *CA) {
471 return CA->getKind() >= CE_BEG_FUNCTION_CALLS &&
472 CA->getKind() <= CE_END_FUNCTION_CALLS;
473 }
474};
475
476/// \brief Represents a C function or static C++ member function call.
477///
478/// Example: \c fun()
479class SimpleFunctionCall : public AnyFunctionCall {
480 friend class CallEventManager;
481
482protected:
483 SimpleFunctionCall(const CallExpr *CE, ProgramStateRef St,
484 const LocationContext *LCtx)
485 : AnyFunctionCall(CE, St, LCtx) {}
486 SimpleFunctionCall(const SimpleFunctionCall &Other) = default;
487
488 void cloneTo(void *Dest) const override {
489 new (Dest) SimpleFunctionCall(*this);
490 }
491
492public:
493 virtual const CallExpr *getOriginExpr() const {
494 return cast<CallExpr>(AnyFunctionCall::getOriginExpr());
495 }
496
497 const FunctionDecl *getDecl() const override;
498
499 unsigned getNumArgs() const override { return getOriginExpr()->getNumArgs(); }
500
501 const Expr *getArgExpr(unsigned Index) const override {
502 return getOriginExpr()->getArg(Index);
503 }
504
505 Kind getKind() const override { return CE_Function; }
506
507 static bool classof(const CallEvent *CA) {
508 return CA->getKind() == CE_Function;
509 }
510};
511
512/// \brief Represents a call to a block.
513///
514/// Example: <tt>^{ /* ... */ }()</tt>
515class BlockCall : public CallEvent {
516 friend class CallEventManager;
517
518protected:
519 BlockCall(const CallExpr *CE, ProgramStateRef St,
520 const LocationContext *LCtx)
521 : CallEvent(CE, St, LCtx) {}
522 BlockCall(const BlockCall &Other) = default;
523
524 void cloneTo(void *Dest) const override { new (Dest) BlockCall(*this); }
525
526 void getExtraInvalidatedValues(ValueList &Values,
527 RegionAndSymbolInvalidationTraits *ETraits) const override;
528
529public:
530 virtual const CallExpr *getOriginExpr() const {
531 return cast<CallExpr>(CallEvent::getOriginExpr());
532 }
533
534 unsigned getNumArgs() const override { return getOriginExpr()->getNumArgs(); }
535
536 const Expr *getArgExpr(unsigned Index) const override {
537 return getOriginExpr()->getArg(Index);
538 }
539
540 /// \brief Returns the region associated with this instance of the block.
541 ///
542 /// This may be NULL if the block's origin is unknown.
543 const BlockDataRegion *getBlockRegion() const;
544
545 const BlockDecl *getDecl() const override {
546 const BlockDataRegion *BR = getBlockRegion();
547 if (!BR)
548 return nullptr;
549 return BR->getDecl();
550 }
551
552 bool isConversionFromLambda() const {
553 const BlockDecl *BD = getDecl();
554 if (!BD)
555 return false;
556
557 return BD->isConversionFromLambda();
558 }
559
560 /// \brief For a block converted from a C++ lambda, returns the block
561 /// VarRegion for the variable holding the captured C++ lambda record.
562 const VarRegion *getRegionStoringCapturedLambda() const {
563 assert(isConversionFromLambda());
564 const BlockDataRegion *BR = getBlockRegion();
565 assert(BR && "Block converted from lambda must have a block region");
566
567 auto I = BR->referenced_vars_begin();
568 assert(I != BR->referenced_vars_end());
569
570 return I.getCapturedRegion();
571 }
572
573 RuntimeDefinition getRuntimeDefinition() const override {
574 if (!isConversionFromLambda())
575 return RuntimeDefinition(getDecl());
576
577 // Clang converts lambdas to blocks with an implicit user-defined
578 // conversion operator method on the lambda record that looks (roughly)
579 // like:
580 //
581 // typedef R(^block_type)(P1, P2, ...);
582 // operator block_type() const {
583 // auto Lambda = *this;
584 // return ^(P1 p1, P2 p2, ...){
585 // /* return Lambda(p1, p2, ...); */
586 // };
587 // }
588 //
589 // Here R is the return type of the lambda and P1, P2, ... are
590 // its parameter types. 'Lambda' is a fake VarDecl captured by the block
591 // that is initialized to a copy of the lambda.
592 //
593 // Sema leaves the body of a lambda-converted block empty (it is
594 // produced by CodeGen), so we can't analyze it directly. Instead, we skip
595 // the block body and analyze the operator() method on the captured lambda.
596 const VarDecl *LambdaVD = getRegionStoringCapturedLambda()->getDecl();
597 const CXXRecordDecl *LambdaDecl = LambdaVD->getType()->getAsCXXRecordDecl();
598 CXXMethodDecl* LambdaCallOperator = LambdaDecl->getLambdaCallOperator();
599
600 return RuntimeDefinition(LambdaCallOperator);
601 }
602
603 bool argumentsMayEscape() const override {
604 return true;
605 }
606
607 void getInitialStackFrameContents(const StackFrameContext *CalleeCtx,
608 BindingsTy &Bindings) const override;
609
610 ArrayRef<ParmVarDecl*> parameters() const override;
611
612 Kind getKind() const override { return CE_Block; }
613
614 static bool classof(const CallEvent *CA) {
615 return CA->getKind() == CE_Block;
616 }
617};
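
For reference, a hedged Objective-C++ illustration (hypothetical user code, requires -fblocks) of source that would take the isConversionFromLambda() path documented in getRuntimeDefinition() above:

    auto Add = [](int A, int B) { return A + B; };
    int (^Block)(int, int) = Add; // Sema synthesizes the conversion operator
    int R = Block(1, 2);          // analyzed by inlining Add's operator()
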
618
619/// \brief Represents a non-static C++ member function call, no matter how
620/// it is written.
621class CXXInstanceCall : public AnyFunctionCall {
622protected:
623 CXXInstanceCall(const CallExpr *CE, ProgramStateRef St,
624 const LocationContext *LCtx)
625 : AnyFunctionCall(CE, St, LCtx) {}
626 CXXInstanceCall(const FunctionDecl *D, ProgramStateRef St,
627 const LocationContext *LCtx)
628 : AnyFunctionCall(D, St, LCtx) {}
629 CXXInstanceCall(const CXXInstanceCall &Other) = default;
630
631 void getExtraInvalidatedValues(ValueList &Values,
632 RegionAndSymbolInvalidationTraits *ETraits) const override;
633
634public:
635 /// \brief Returns the expression representing the implicit 'this' object.
636 virtual const Expr *getCXXThisExpr() const { return nullptr; }
637
638 /// \brief Returns the value of the implicit 'this' object.
639 virtual SVal getCXXThisVal() const;
640
641 const FunctionDecl *getDecl() const override;
642
643 RuntimeDefinition getRuntimeDefinition() const override;
644
645 void getInitialStackFrameContents(const StackFrameContext *CalleeCtx,
646 BindingsTy &Bindings) const override;
647
648 static bool classof(const CallEvent *CA) {
649 return CA->getKind() >= CE_BEG_CXX_INSTANCE_CALLS &&
650 CA->getKind() <= CE_END_CXX_INSTANCE_CALLS;
651 }
652};
653
654/// \brief Represents a non-static C++ member function call.
655///
656/// Example: \c obj.fun()
657class CXXMemberCall : public CXXInstanceCall {
658 friend class CallEventManager;
659
660protected:
661 CXXMemberCall(const CXXMemberCallExpr *CE, ProgramStateRef St,
662 const LocationContext *LCtx)
663 : CXXInstanceCall(CE, St, LCtx) {}
664 CXXMemberCall(const CXXMemberCall &Other) = default;
665
666 void cloneTo(void *Dest) const override { new (Dest) CXXMemberCall(*this); }
667
668public:
669 virtual const CXXMemberCallExpr *getOriginExpr() const {
670 return cast<CXXMemberCallExpr>(CXXInstanceCall::getOriginExpr());
671 }
672
673 unsigned getNumArgs() const override {
674 if (const CallExpr *CE = getOriginExpr())
675 return CE->getNumArgs();
676 return 0;
677 }
678
679 const Expr *getArgExpr(unsigned Index) const override {
680 return getOriginExpr()->getArg(Index);
681 }
682
683 const Expr *getCXXThisExpr() const override;
684
685 RuntimeDefinition getRuntimeDefinition() const override;
686
687 Kind getKind() const override { return CE_CXXMember; }
688
689 static bool classof(const CallEvent *CA) {
690 return CA->getKind() == CE_CXXMember;
691 }
692};
693
694/// \brief Represents a C++ overloaded operator call where the operator is
695/// implemented as a non-static member function.
696///
697/// Example: <tt>iter + 1</tt>
698class CXXMemberOperatorCall : public CXXInstanceCall {
699 friend class CallEventManager;
700
701protected:
702 CXXMemberOperatorCall(const CXXOperatorCallExpr *CE, ProgramStateRef St,
703 const LocationContext *LCtx)
704 : CXXInstanceCall(CE, St, LCtx) {}
705 CXXMemberOperatorCall(const CXXMemberOperatorCall &Other) = default;
706
707 void cloneTo(void *Dest) const override {
708 new (Dest) CXXMemberOperatorCall(*this);
709 }
710
711public:
712 virtual const CXXOperatorCallExpr *getOriginExpr() const {
713 return cast<CXXOperatorCallExpr>(CXXInstanceCall::getOriginExpr());
714 }
715
716 unsigned getNumArgs() const override {
717 return getOriginExpr()->getNumArgs() - 1;
718 }
719
720 const Expr *getArgExpr(unsigned Index) const override {
721 return getOriginExpr()->getArg(Index + 1);
722 }
723
724 const Expr *getCXXThisExpr() const override;
725
726 Kind getKind() const override { return CE_CXXMemberOperator; }
727
728 static bool classof(const CallEvent *CA) {
729 return CA->getKind() == CE_CXXMemberOperator;
730 }
731};
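
A hedged sketch of the index shifting above: for a call such as iter + 1, the underlying CXXOperatorCallExpr stores (iter, 1), with the implicit object argument in slot 0, which this class hides from checkers:

    // Inside a hypothetical checker callback, given `const CallEvent &Call`:
    if (const auto *Op = dyn_cast<CXXMemberOperatorCall>(&Call)) {
      SVal This = Op->getCXXThisVal(); // value of `iter`
      SVal RHS  = Op->getArgSVal(0);   // value of `1`, slot 0 after the shift
      (void)This; (void)RHS;
    }
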
732
733/// \brief Represents an implicit call to a C++ destructor.
734///
735/// This can occur at the end of a scope (for automatic objects), at the end
736/// of a full-expression (for temporaries), or as part of a delete.
737class CXXDestructorCall : public CXXInstanceCall {
738 friend class CallEventManager;
739
740protected:
741 using DtorDataTy = llvm::PointerIntPair<const MemRegion *, 1, bool>;
742
743 /// Creates an implicit destructor.
744 ///
745 /// \param DD The destructor that will be called.
746 /// \param Trigger The statement whose completion causes this destructor call.
747 /// \param Target The object region to be destructed.
748 /// \param St The path-sensitive state at this point in the program.
749 /// \param LCtx The location context at this point in the program.
750 CXXDestructorCall(const CXXDestructorDecl *DD, const Stmt *Trigger,
751 const MemRegion *Target, bool IsBaseDestructor,
752 ProgramStateRef St, const LocationContext *LCtx)
753 : CXXInstanceCall(DD, St, LCtx) {
754 Data = DtorDataTy(Target, IsBaseDestructor).getOpaqueValue();
755 Location = Trigger->getLocEnd();
756 }
757
758 CXXDestructorCall(const CXXDestructorCall &Other) = default;
759
760 void cloneTo(void *Dest) const override {new (Dest) CXXDestructorCall(*this);}
761
762public:
763 SourceRange getSourceRange() const override { return Location; }
764 unsigned getNumArgs() const override { return 0; }
765
766 RuntimeDefinition getRuntimeDefinition() const override;
767
768 /// \brief Returns the value of the implicit 'this' object.
769 SVal getCXXThisVal() const override;
770
771 /// Returns true if this is a call to a base class destructor.
772 bool isBaseDestructor() const {
773 return DtorDataTy::getFromOpaqueValue(Data).getInt();
774 }
775
776 Kind getKind() const override { return CE_CXXDestructor; }
777
778 static bool classof(const CallEvent *CA) {
779 return CA->getKind() == CE_CXXDestructor;
780 }
781};
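
The Data-field trick above (packing the target region pointer and the is-base flag into one opaque pointer) is ordinary llvm::PointerIntPair usage. A stand-alone sketch, with int* standing in for const MemRegion*:

    #include "llvm/ADT/PointerIntPair.h"

    // One spare low bit of a sufficiently aligned pointer holds the flag.
    using PackedTy = llvm::PointerIntPair<int *, 1, bool>;

    static bool roundTrip(int *Target, bool IsBase) {
      void *Opaque = PackedTy(Target, IsBase).getOpaqueValue();
      PackedTy Unpacked = PackedTy::getFromOpaqueValue(Opaque);
      return Unpacked.getPointer() == Target && Unpacked.getInt() == IsBase;
    }
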
782
783/// \brief Represents a call to a C++ constructor.
784///
785/// Example: \c T(1)
786class CXXConstructorCall : public AnyFunctionCall {
787 friend class CallEventManager;
788
789protected:
790 /// Creates a constructor call.
791 ///
792 /// \param CE The constructor expression as written in the source.
793 /// \param Target The region where the object should be constructed. If NULL,
794 /// a new symbolic region will be used.
795 /// \param St The path-sensitive state at this point in the program.
796 /// \param LCtx The location context at this point in the program.
797 CXXConstructorCall(const CXXConstructExpr *CE, const MemRegion *Target,
798 ProgramStateRef St, const LocationContext *LCtx)
799 : AnyFunctionCall(CE, St, LCtx) {
800 Data = Target;
801 }
802
803 CXXConstructorCall(const CXXConstructorCall &Other) = default;
804
805 void cloneTo(void *Dest) const override { new (Dest) CXXConstructorCall(*this); }
806
807 void getExtraInvalidatedValues(ValueList &Values,
808 RegionAndSymbolInvalidationTraits *ETraits) const override;
809
810public:
811 virtual const CXXConstructExpr *getOriginExpr() const {
812 return cast<CXXConstructExpr>(AnyFunctionCall::getOriginExpr());
813 }
814
815 const CXXConstructorDecl *getDecl() const override {
816 return getOriginExpr()->getConstructor();
817 }
818
819 unsigned getNumArgs() const override { return getOriginExpr()->getNumArgs(); }
820
821 const Expr *getArgExpr(unsigned Index) const override {
822 return getOriginExpr()->getArg(Index);
823 }
824
825 /// \brief Returns the value of the implicit 'this' object.
826 SVal getCXXThisVal() const;
827
828 void getInitialStackFrameContents(const StackFrameContext *CalleeCtx,
829 BindingsTy &Bindings) const override;
830
831 Kind getKind() const override { return CE_CXXConstructor; }
832
833 static bool classof(const CallEvent *CA) {
834 return CA->getKind() == CE_CXXConstructor;
835 }
836};
837
838/// \brief Represents the memory allocation call in a C++ new-expression.
839///
840/// This is a call to "operator new".
841class CXXAllocatorCall : public AnyFunctionCall {
842 friend class CallEventManager;
843
844protected:
845 CXXAllocatorCall(const CXXNewExpr *E, ProgramStateRef St,
846 const LocationContext *LCtx)
847 : AnyFunctionCall(E, St, LCtx) {}
848 CXXAllocatorCall(const CXXAllocatorCall &Other) = default;
849
850 void cloneTo(void *Dest) const override { new (Dest) CXXAllocatorCall(*this); }
851
852public:
853 virtual const CXXNewExpr *getOriginExpr() const {
854 return cast<CXXNewExpr>(AnyFunctionCall::getOriginExpr());
855 }
856
857 const FunctionDecl *getDecl() const override {
858 return getOriginExpr()->getOperatorNew();
859 }
860
861 unsigned getNumArgs() const override {
862 return getOriginExpr()->getNumPlacementArgs() + 1;
863 }
864
865 const Expr *getArgExpr(unsigned Index) const override {
866 // The first argument of an allocator call is the size of the allocation.
867 if (Index == 0)
868 return nullptr;
869 return getOriginExpr()->getPlacementArg(Index - 1);
870 }
871
872 Kind getKind() const override { return CE_CXXAllocator; }
873
874 static bool classof(const CallEvent *CE) {
875 return CE->getKind() == CE_CXXAllocator;
876 }
877};
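
A hedged illustration of the argument numbering above: for `new (Buf) Obj`, getNumArgs() is 2, where slot 0 is the implicit allocation size (not written in the source, so getArgExpr(0) is null) and slot 1 is the placement argument Buf:

    // Inside a hypothetical checker callback, given `const CallEvent &Call`:
    if (const auto *Alloc = dyn_cast<CXXAllocatorCall>(&Call)) {
      SVal Size = Alloc->getArgSVal(0); // may be Unknown: no source expr
      if (Alloc->getNumArgs() > 1) {
        const Expr *Placement = Alloc->getArgExpr(1); // first placement arg
        (void)Placement;
      }
      (void)Size;
    }
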
878
879/// \brief Represents the ways an Objective-C message send can occur.
880//
881// Note to maintainers: OCM_Message should always be last, since it does not
882// need to fit in the Data field's low bits.
883enum ObjCMessageKind {
884 OCM_PropertyAccess,
885 OCM_Subscript,
886 OCM_Message
887};
888
889/// \brief Represents any expression that calls an Objective-C method.
890///
891/// This includes all of the kinds listed in ObjCMessageKind.
892class ObjCMethodCall : public CallEvent {
893 friend class CallEventManager;
894
895 const PseudoObjectExpr *getContainingPseudoObjectExpr() const;
896
897protected:
898 ObjCMethodCall(const ObjCMessageExpr *Msg, ProgramStateRef St,
899 const LocationContext *LCtx)
900 : CallEvent(Msg, St, LCtx) {
901 Data = nullptr;
902 }
903
904 ObjCMethodCall(const ObjCMethodCall &Other) = default;
905
906 void cloneTo(void *Dest) const override { new (Dest) ObjCMethodCall(*this); }
907
908 void getExtraInvalidatedValues(ValueList &Values,
909 RegionAndSymbolInvalidationTraits *ETraits) const override;
910
911 /// Check if the selector may have multiple definitions (may have overrides).
912 virtual bool canBeOverridenInSubclass(ObjCInterfaceDecl *IDecl,
913 Selector Sel) const;
914
915public:
916 virtual const ObjCMessageExpr *getOriginExpr() const {
917 return cast<ObjCMessageExpr>(CallEvent::getOriginExpr());
918 }
919
920 const ObjCMethodDecl *getDecl() const override {
921 return getOriginExpr()->getMethodDecl();
922 }
923
924 unsigned getNumArgs() const override {
925 return getOriginExpr()->getNumArgs();
926 }
927
928 const Expr *getArgExpr(unsigned Index) const override {
929 return getOriginExpr()->getArg(Index);
930 }
931
932 bool isInstanceMessage() const {
933 return getOriginExpr()->isInstanceMessage();
934 }
935
936 ObjCMethodFamily getMethodFamily() const {
937 return getOriginExpr()->getMethodFamily();
938 }
939
940 Selector getSelector() const {
941 return getOriginExpr()->getSelector();
942 }
943
944 SourceRange getSourceRange() const override;
945
946 /// \brief Returns the value of the receiver at the time of this call.
947 SVal getReceiverSVal() const;
948
949 /// \brief Return the value of 'self' if available.
950 SVal getSelfSVal() const;
951
952 /// \brief Get the interface for the receiver.
953 ///
954 /// This works whether this is an instance message or a class message.
955 /// However, it currently just uses the static type of the receiver.
956 const ObjCInterfaceDecl *getReceiverInterface() const {
957 return getOriginExpr()->getReceiverInterface();
958 }
959
960 /// \brief Checks if the receiver refers to 'self' or 'super'.
961 bool isReceiverSelfOrSuper() const;
962
963 /// Returns how the message was written in the source (property access,
964 /// subscript, or explicit message send).
965 ObjCMessageKind getMessageKind() const;
966
967 /// Returns true if this property access or subscript is a setter (has the
968 /// form of an assignment).
969 bool isSetter() const {
970 switch (getMessageKind()) {
971 case OCM_Message:
972 llvm_unreachable("This is not a pseudo-object access!");
973 case OCM_PropertyAccess:
974 return getNumArgs() > 0;
975 case OCM_Subscript:
976 return getNumArgs() > 1;
977 }
978 llvm_unreachable("Unknown message kind");
979 }
980
981 // Returns the property accessed by this method, either explicitly via
982 // property syntax or implicitly via a getter or setter method. Returns
983 // nullptr if the call is not a property access.
984 const ObjCPropertyDecl *getAccessedProperty() const;
985
986 RuntimeDefinition getRuntimeDefinition() const override;
987
988 bool argumentsMayEscape() const override;
989
990 void getInitialStackFrameContents(const StackFrameContext *CalleeCtx,
991 BindingsTy &Bindings) const override;
992
993 ArrayRef<ParmVarDecl*> parameters() const override;
994
995 Kind getKind() const override { return CE_ObjCMessage; }
996
997 static bool classof(const CallEvent *CA) {
998 return CA->getKind() == CE_ObjCMessage;
999 }
1000};
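
A hedged sketch of consuming getMessageKind() and isSetter() from checker code; note that isSetter() must not be called for plain message sends (OCM_Message), per the unreachable above:

    // Inside a hypothetical checker callback, given `const CallEvent &Call`:
    if (const auto *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
      ObjCMessageKind K = Msg->getMessageKind();
      if (K != OCM_Message && Msg->isSetter()) {
        // A property or subscript store, e.g. `obj.name = x` or `d[k] = v`.
      }
    }
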
1001
1002/// \brief Manages the lifetime of CallEvent objects.
1003///
1004/// CallEventManager provides a way to create arbitrary CallEvents "on the
1005/// stack" as if they were value objects by keeping a cache of CallEvent-sized
1006/// memory blocks. The CallEvents created by CallEventManager are only valid
1007/// for the lifetime of the OwnedCallEvent that holds them; right now these
1008/// objects cannot be copied and ownership cannot be transferred.
1009class CallEventManager {
1010 friend class CallEvent;
1011
1012 llvm::BumpPtrAllocator &Alloc;
1013 SmallVector<void *, 8> Cache;
1014
1015 using CallEventTemplateTy = SimpleFunctionCall;
1016
1017 void reclaim(const void *Memory) {
1018 Cache.push_back(const_cast<void *>(Memory));
1019 }
1020
1021 /// Returns memory that can be initialized as a CallEvent.
1022 void *allocate() {
1023 if (Cache.empty())
1024 return Alloc.Allocate<CallEventTemplateTy>();
1025 else
1026 return Cache.pop_back_val();
1027 }
1028
1029 template <typename T, typename Arg>
1030 T *create(Arg A, ProgramStateRef St, const LocationContext *LCtx) {
1031 static_assert(sizeof(T) == sizeof(CallEventTemplateTy),
1032 "CallEvent subclasses are not all the same size");
1033 return new (allocate()) T(A, St, LCtx);
1034 }
1035
1036 template <typename T, typename Arg1, typename Arg2>
1037 T *create(Arg1 A1, Arg2 A2, ProgramStateRef St, const LocationContext *LCtx) {
1038 static_assert(sizeof(T) == sizeof(CallEventTemplateTy),
1039 "CallEvent subclasses are not all the same size");
1040 return new (allocate()) T(A1, A2, St, LCtx);
1041 }
1042
1043 template <typename T, typename Arg1, typename Arg2, typename Arg3>
1044 T *create(Arg1 A1, Arg2 A2, Arg3 A3, ProgramStateRef St,
1045 const LocationContext *LCtx) {
1046 static_assert(sizeof(T) == sizeof(CallEventTemplateTy),
1047 "CallEvent subclasses are not all the same size");
1048 return new (allocate()) T(A1, A2, A3, St, LCtx);
1049 }
1050
1051 template <typename T, typename Arg1, typename Arg2, typename Arg3,
1052 typename Arg4>
1053 T *create(Arg1 A1, Arg2 A2, Arg3 A3, Arg4 A4, ProgramStateRef St,
1054 const LocationContext *LCtx) {
1055 static_assert(sizeof(T) == sizeof(CallEventTemplateTy),
1056 "CallEvent subclasses are not all the same size");
1057 return new (allocate()) T(A1, A2, A3, A4, St, LCtx);
1058 }
1059
1060public:
1061 CallEventManager(llvm::BumpPtrAllocator &alloc) : Alloc(alloc) {}
1062
1063 CallEventRef<>
1064 getCaller(const StackFrameContext *CalleeCtx, ProgramStateRef State);
1065
1066 CallEventRef<>
1067 getSimpleCall(const CallExpr *E, ProgramStateRef State,
1068 const LocationContext *LCtx);
1069
1070 CallEventRef<ObjCMethodCall>
1071 getObjCMethodCall(const ObjCMessageExpr *E, ProgramStateRef State,
1072 const LocationContext *LCtx) {
1073 return create<ObjCMethodCall>(E, State, LCtx);
1074 }
1075
1076 CallEventRef<CXXConstructorCall>
1077 getCXXConstructorCall(const CXXConstructExpr *E, const MemRegion *Target,
1078 ProgramStateRef State, const LocationContext *LCtx) {
1079 return create<CXXConstructorCall>(E, Target, State, LCtx);
1080 }
1081
1082 CallEventRef<CXXDestructorCall>
1083 getCXXDestructorCall(const CXXDestructorDecl *DD, const Stmt *Trigger,
1084 const MemRegion *Target, bool IsBase,
1085 ProgramStateRef State, const LocationContext *LCtx) {
1086 return create<CXXDestructorCall>(DD, Trigger, Target, IsBase, State, LCtx);
1087 }
1088
1089 CallEventRef<CXXAllocatorCall>
1090 getCXXAllocatorCall(const CXXNewExpr *E, ProgramStateRef State,
1091 const LocationContext *LCtx) {
1092 return create<CXXAllocatorCall>(E, State, LCtx);
1093 }
1094};
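
A hedged usage sketch, loosely mirroring how the engine obtains CallEvents (assumes a CallExpr *CE, a ProgramStateRef State, and a LocationContext *LCtx are already in scope):

    CallEventManager &CEMgr = State->getStateManager().getCallEventManager();
    CallEventRef<> Call = CEMgr.getSimpleCall(CE, State, LCtx);
    unsigned NumArgs = Call->getNumArgs(); // CallEventRef acts as a pointer
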
1095
1096template <typename T>
1097CallEventRef<T> CallEvent::cloneWithState(ProgramStateRef NewState) const {
1098 assert(isa<T>(*this) && "Cloning to unrelated type");
1099 static_assert(sizeof(T) == sizeof(CallEvent),
1100 "Subclasses may not add fields");
1101
1102 if (NewState == State)
1103 return cast<T>(this);
1104
1105 CallEventManager &Mgr = State->getStateManager().getCallEventManager();
1106 T *Copy = static_cast<T *>(Mgr.allocate());
1107 cloneTo(Copy);
1108 assert(Copy->getKind() == this->getKind() && "Bad copy");
1109
1110 Copy->State = NewState;
1111 return Copy;
1112}
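
Usage-wise, a hedged sketch: cloneWithState() is how callers rebind an existing CallEvent to a transformed state. When the state is unchanged, the original object is returned without copying:

    // Assuming `Call` is a CallEventRef<> and `NewState` a ProgramStateRef:
    CallEventRef<> Updated = Call->cloneWithState(NewState);
    // Updated == Call when NewState == Call->getState(); otherwise a clone
    // is placement-constructed in CallEventManager-owned memory.
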
1113
1114inline void CallEvent::Release() const {
1115 assert(RefCount > 0 && "Reference count is already zero.");
1116 --RefCount;
1117
1118 if (RefCount > 0)
1119 return;
1120
1121 CallEventManager &Mgr = State->getStateManager().getCallEventManager();
1122 Mgr.reclaim(this);
1123
1124 this->~CallEvent();
1125}
1126
1127} // namespace ento
1128
1129} // namespace clang
1130
1131namespace llvm {
1132
1133// Support isa<>, cast<>, and dyn_cast<> for CallEventRef.
1134template<class T> struct simplify_type< clang::ento::CallEventRef<T>> {
1135 using SimpleType = const T *;
1136
1137 static SimpleType
1138 getSimplifiedValue(clang::ento::CallEventRef<T> Val) {
1139 return Val.get();
1140 }
1141};
1142
1143} // namespace llvm
1144
1145#endif // LLVM_CLANG_STATICANALYZER_CORE_PATHSENSITIVE_CALLEVENT_H

/build/llvm-toolchain-snapshot-7~svn326551/include/llvm/ADT/IntrusiveRefCntPtr.h

1//==- llvm/ADT/IntrusiveRefCntPtr.h - Smart Refcounting Pointer --*- C++ -*-==//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file defines the RefCountedBase, ThreadSafeRefCountedBase, and
11// IntrusiveRefCntPtr classes.
12//
13// IntrusiveRefCntPtr is a smart pointer to an object which maintains a
14// reference count. (ThreadSafe)RefCountedBase is a mixin class that adds a
15// refcount member variable and methods for updating the refcount. An object
16// that inherits from (ThreadSafe)RefCountedBase deletes itself when its
17// refcount hits zero.
18//
19// For example:
20//
21// class MyClass : public RefCountedBase<MyClass> {};
22//
23// void foo() {
24// // Constructing an IntrusiveRefCntPtr increases the pointee's refcount by
25// // 1 (from 0 in this case).
26// IntrusiveRefCntPtr<MyClass> Ptr1(new MyClass());
27//
28// // Copying an IntrusiveRefCntPtr increases the pointee's refcount by 1.
29// IntrusiveRefCntPtr<MyClass> Ptr2(Ptr1);
30//
31// // Moving an IntrusiveRefCntPtr has no effect on the object's
32// // refcount. After a move, the moved-from pointer is null.
33// IntrusiveRefCntPtr<MyClass> Ptr3(std::move(Ptr1));
34// assert(Ptr1 == nullptr);
35//
36// // Clearing an IntrusiveRefCntPtr decreases the pointee's refcount by 1.
37// Ptr2.reset();
38//
39// // The object deletes itself when we return from the function, because
40// // Ptr3's destructor decrements its refcount to 0.
41// }
42//
43// You can use IntrusiveRefCntPtr with isa<T>(), dyn_cast<T>(), etc.:
44//
45// IntrusiveRefCntPtr<MyClass> Ptr(new MyClass());
46// OtherClass *Other = dyn_cast<OtherClass>(Ptr); // Ptr.get() not required
47//
48// IntrusiveRefCntPtr works with any class that
49//
50// - inherits from (ThreadSafe)RefCountedBase,
51// - has Retain() and Release() methods, or
52// - specializes IntrusiveRefCntPtrInfo.
53//
54//===----------------------------------------------------------------------===//
55
56#ifndef LLVM_ADT_INTRUSIVEREFCNTPTR_H
57#define LLVM_ADT_INTRUSIVEREFCNTPTR_H
58
59#include <atomic>
60#include <cassert>
61#include <cstddef>
62
63namespace llvm {
64
65/// A CRTP mixin class that adds reference counting to a type.
66///
67/// The lifetime of an object which inherits from RefCountedBase is managed by
68/// calls to Release() and Retain(), which increment and decrement the object's
69/// refcount, respectively. When a Release() call decrements the refcount to 0,
70/// the object deletes itself.
71template <class Derived> class RefCountedBase {
72 mutable unsigned RefCount = 0;
73
74public:
75 RefCountedBase() = default;
76 RefCountedBase(const RefCountedBase &) {}
77
78 void Retain() const { ++RefCount; }
79
80 void Release() const {
81 assert(RefCount > 0 && "Reference count is already zero.");
82 if (--RefCount == 0)
83 delete static_cast<const Derived *>(this);
84 }
85};
86
87/// A thread-safe version of \c RefCountedBase.
88template <class Derived> class ThreadSafeRefCountedBase {
89 mutable std::atomic<int> RefCount;
90
91protected:
92 ThreadSafeRefCountedBase() : RefCount(0) {}
93
94public:
95 void Retain() const { RefCount.fetch_add(1, std::memory_order_relaxed); }
96
97 void Release() const {
98 int NewRefCount = RefCount.fetch_sub(1, std::memory_order_acq_rel) - 1;
99 assert(NewRefCount >= 0 && "Reference count was already zero.");
100 if (NewRefCount == 0)
101 delete static_cast<const Derived *>(this);
102 }
103};
104
105/// Class you can specialize to provide custom retain/release functionality for
106/// a type.
107///
108/// Usually specializing this class is not necessary, as IntrusiveRefCntPtr
109/// works with any type which defines Retain() and Release() functions -- you
110/// can define those functions yourself if RefCountedBase doesn't work for you.
111///
112/// One case when you might want to specialize this type is if you have
113/// - Foo.h defines type Foo and includes Bar.h, and
114/// - Bar.h uses IntrusiveRefCntPtr<Foo> in inline functions.
115///
116/// Because Foo.h includes Bar.h, Bar.h can't include Foo.h in order to pull in
117/// the declaration of Foo. Without the declaration of Foo, normally Bar.h
118/// wouldn't be able to use IntrusiveRefCntPtr<Foo>, which wants to call
119/// T::Retain and T::Release.
120///
121/// To resolve this, Bar.h could include a third header, FooFwd.h, which
122/// forward-declares Foo and specializes IntrusiveRefCntPtrInfo<Foo>. Then
123/// Bar.h could use IntrusiveRefCntPtr<Foo>, although it still couldn't call any
124/// functions on Foo itself, because Foo would be an incomplete type.
125template <typename T> struct IntrusiveRefCntPtrInfo {
126 static void retain(T *obj) { obj->Retain(); }
127 static void release(T *obj) { obj->Release(); }
128};
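
A hedged sketch of the FooFwd.h workaround described above (all names are hypothetical):

    // FooFwd.h
    #include "llvm/ADT/IntrusiveRefCntPtr.h"

    class Foo; // forward declaration only; Foo stays incomplete here
    void retainFoo(Foo *F);  // defined in Foo.cpp, where Foo is complete
    void releaseFoo(Foo *F);

    namespace llvm {
    template <> struct IntrusiveRefCntPtrInfo<Foo> {
      static void retain(Foo *Obj) { retainFoo(Obj); }
      static void release(Foo *Obj) { releaseFoo(Obj); }
    };
    } // namespace llvm
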
129
130/// A smart pointer to a reference-counted object that inherits from
131/// RefCountedBase or ThreadSafeRefCountedBase.
132///
133/// This class increments its pointee's reference count when it is created, and
134/// decrements its refcount when it's destroyed (or is changed to point to a
135/// different object).
136template <typename T> class IntrusiveRefCntPtr {
137 T *Obj = nullptr;
138
139public:
140 using element_type = T;
141
142 explicit IntrusiveRefCntPtr() = default;
143 IntrusiveRefCntPtr(T *obj) : Obj(obj) { retain(); }
144 IntrusiveRefCntPtr(const IntrusiveRefCntPtr &S) : Obj(S.Obj) { retain(); }
4. Null pointer value stored to 'Call.Obj'
145 IntrusiveRefCntPtr(IntrusiveRefCntPtr &&S) : Obj(S.Obj) { S.Obj = nullptr; }
146
147 template <class X>
148 IntrusiveRefCntPtr(IntrusiveRefCntPtr<X> &&S) : Obj(S.get()) {
149 S.Obj = nullptr;
150 }
151
152 template <class X>
153 IntrusiveRefCntPtr(const IntrusiveRefCntPtr<X> &S) : Obj(S.get()) {
154 retain();
155 }
156
157 ~IntrusiveRefCntPtr() { release(); }
158
159 IntrusiveRefCntPtr &operator=(IntrusiveRefCntPtr S) {
160 swap(S);
161 return *this;
162 }
163
164 T &operator*() const { return *Obj; }
8. Returning null reference
165 T *operator->() const { return Obj; }
166 T *get() const { return Obj; }
167 explicit operator bool() const { return Obj; }
168
169 void swap(IntrusiveRefCntPtr &other) {
170 T *tmp = other.Obj;
171 other.Obj = Obj;
172 Obj = tmp;
173 }
174
175 void reset() {
176 release();
177 Obj = nullptr;
178 }
179
180 void resetWithoutRelease() { Obj = nullptr; }
181
182private:
183 void retain() {
184 if (Obj)
185 IntrusiveRefCntPtrInfo<T>::retain(Obj);
186 }
187
188 void release() {
189 if (Obj)
190 IntrusiveRefCntPtrInfo<T>::release(Obj);
191 }
192
193 template <typename X> friend class IntrusiveRefCntPtr;
194};
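
For context, a hedged illustration of the pattern behind the "Returning null reference" note on operator* above, reusing MyClass from the example comment at the top of this header: dereferencing a default-constructed (or moved-from) IntrusiveRefCntPtr binds a reference to null, which is undefined behavior:

    llvm::IntrusiveRefCntPtr<MyClass> P; // Obj == nullptr
    MyClass &R = *P;                     // operator* returns *Obj, Obj null
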
195
196template <class T, class U>
197inline bool operator==(const IntrusiveRefCntPtr<T> &A,
198 const IntrusiveRefCntPtr<U> &B) {
199 return A.get() == B.get();
200}
201
202template <class T, class U>
203inline bool operator!=(const IntrusiveRefCntPtr<T> &A,
204 const IntrusiveRefCntPtr<U> &B) {
205 return A.get() != B.get();
206}
207
208template <class T, class U>
209inline bool operator==(const IntrusiveRefCntPtr<T> &A, U *B) {
210 return A.get() == B;
211}
212
213template <class T, class U>
214inline bool operator!=(const IntrusiveRefCntPtr<T> &A, U *B) {
215 return A.get() != B;
216}
217
218template <class T, class U>
219inline bool operator==(T *A, const IntrusiveRefCntPtr<U> &B) {
220 return A == B.get();
221}
222
223template <class T, class U>
224inline bool operator!=(T *A, const IntrusiveRefCntPtr<U> &B) {
225 return A != B.get();
226}
227
228template <class T>
229bool operator==(std::nullptr_t A, const IntrusiveRefCntPtr<T> &B) {
230 return !B;
231}
232
233template <class T>
234bool operator==(const IntrusiveRefCntPtr<T> &A, std::nullptr_t B) {
235 return B == A;
236}
237
238template <class T>
239bool operator!=(std::nullptr_t A, const IntrusiveRefCntPtr<T> &B) {
240 return !(A == B);
241}
242
243template <class T>
244bool operator!=(const IntrusiveRefCntPtr<T> &A, std::nullptr_t B) {
245 return !(A == B);
246}
247
248// Make IntrusiveRefCntPtr work with dyn_cast, isa, and the other idioms from
249// Casting.h.
250template <typename From> struct simplify_type;
251
252template <class T> struct simplify_type<IntrusiveRefCntPtr<T>> {
253 using SimpleType = T *;
254
255 static SimpleType getSimplifiedValue(IntrusiveRefCntPtr<T> &Val) {
256 return Val.get();
257 }
258};
259
260template <class T> struct simplify_type<const IntrusiveRefCntPtr<T>> {
261 using SimpleType = /*const*/ T *;
262
263 static SimpleType getSimplifiedValue(const IntrusiveRefCntPtr<T> &Val) {
264 return Val.get();
265 }
266};
267
268} // end namespace llvm
269
270#endif // LLVM_ADT_INTRUSIVEREFCNTPTR_H