Bug Summary

File: clang/lib/StaticAnalyzer/Core/ExprEngineCXX.cpp
Warning: line 497, column 40
Called C++ object pointer is null
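
For orientation, here is a minimal, self-contained sketch (hypothetical struct and function names, not the analyzer's own classes) of the shape of the flagged code: two failed dyn_cast-style checks, an assert that documents the invariant but is compiled out by the -D NDEBUG flag visible in the command line below, and a conditional expression whose false arm dereferences the second pointer.

#include <cassert>

// Hypothetical stand-ins for CXXConstructExpr / CXXInheritedCtorInitExpr.
struct Expr { int Kind; };
struct ConstructExpr : Expr { int getConstructionKind() const { return 0; } };
struct InheritedCtorInitExpr : Expr { int getConstructionKind() const { return 0; } };

template <typename T>
const T *dynCastSketch(const Expr *E, int WantedKind) {
  // Stand-in for llvm::dyn_cast: returns null when the dynamic kind does not match.
  return E->Kind == WantedKind ? static_cast<const T *>(E) : nullptr;
}

int constructionKind(const Expr *E) {
  const auto *CE  = dynCastSketch<ConstructExpr>(E, /*WantedKind=*/1);
  const auto *CIE = dynCastSketch<InheritedCtorInitExpr>(E, /*WantedKind=*/2);
  assert(CE || CIE); // a no-op under -D NDEBUG, so it places no constraint on the analysis
  // On the path where both casts are assumed to fail, CIE is null here and the
  // false arm of the conditional dereferences it: the reported defect.
  return CE ? CE->getConstructionKind() : CIE->getConstructionKind();
}

When one of the two casts succeeds, as the callers guarantee, the function behaves as intended; the report arises only on the analyzer's hypothetical path where both fail.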

Annotated Source Code

clang -cc1 -cc1 -triple x86_64-pc-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name ExprEngineCXX.cpp -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model pic -pic-level 2 -mframe-pointer=none -relaxed-aliasing -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -ffunction-sections -fdata-sections -fcoverage-compilation-dir=/build/llvm-toolchain-snapshot-14~++20210903100615+fd66b44ec19e/build-llvm/tools/clang/lib/StaticAnalyzer/Core -resource-dir /usr/lib/llvm-14/lib/clang/14.0.0 -D _GNU_SOURCE -D __STDC_CONSTANT_MACROS -D __STDC_FORMAT_MACROS -D __STDC_LIMIT_MACROS -I /build/llvm-toolchain-snapshot-14~++20210903100615+fd66b44ec19e/build-llvm/tools/clang/lib/StaticAnalyzer/Core -I /build/llvm-toolchain-snapshot-14~++20210903100615+fd66b44ec19e/clang/lib/StaticAnalyzer/Core -I /build/llvm-toolchain-snapshot-14~++20210903100615+fd66b44ec19e/clang/include -I /build/llvm-toolchain-snapshot-14~++20210903100615+fd66b44ec19e/build-llvm/tools/clang/include -I /build/llvm-toolchain-snapshot-14~++20210903100615+fd66b44ec19e/build-llvm/include -I /build/llvm-toolchain-snapshot-14~++20210903100615+fd66b44ec19e/llvm/include -D NDEBUG -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/10/../../../../include/c++/10 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/10/../../../../include/x86_64-linux-gnu/c++/10 -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/10/../../../../include/c++/10/backward -internal-isystem /usr/lib/llvm-14/lib/clang/14.0.0/include -internal-isystem /usr/local/include -internal-isystem /usr/lib/gcc/x86_64-linux-gnu/10/../../../../x86_64-linux-gnu/include -internal-externc-isystem /usr/include/x86_64-linux-gnu -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-unused-parameter -Wwrite-strings -Wno-missing-field-initializers -Wno-long-long -Wno-maybe-uninitialized -Wno-class-memaccess -Wno-redundant-move -Wno-pessimizing-move -Wno-noexcept-type -Wno-comment -std=c++14 -fdeprecated-macro -fdebug-compilation-dir=/build/llvm-toolchain-snapshot-14~++20210903100615+fd66b44ec19e/build-llvm/tools/clang/lib/StaticAnalyzer/Core -fdebug-prefix-map=/build/llvm-toolchain-snapshot-14~++20210903100615+fd66b44ec19e=. -ferror-limit 19 -fvisibility-inlines-hidden -stack-protector 2 -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=html -analyzer-config stable-report-filename=true -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /tmp/scan-build-2021-09-04-040900-46481-1 -x c++ /build/llvm-toolchain-snapshot-14~++20210903100615+fd66b44ec19e/clang/lib/StaticAnalyzer/Core/ExprEngineCXX.cpp
1//===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines the C++ expression evaluation engine.
10//
11//===----------------------------------------------------------------------===//
12
13#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
14#include "clang/Analysis/ConstructionContext.h"
15#include "clang/AST/DeclCXX.h"
16#include "clang/AST/StmtCXX.h"
17#include "clang/AST/ParentMap.h"
18#include "clang/Basic/PrettyStackTrace.h"
19#include "clang/StaticAnalyzer/Core/CheckerManager.h"
20#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
21#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
22
23using namespace clang;
24using namespace ento;
25
26void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME,
27 ExplodedNode *Pred,
28 ExplodedNodeSet &Dst) {
29 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
30 const Expr *tempExpr = ME->getSubExpr()->IgnoreParens();
31 ProgramStateRef state = Pred->getState();
32 const LocationContext *LCtx = Pred->getLocationContext();
33
34 state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME);
35 Bldr.generateNode(ME, Pred, state);
36}
37
38// FIXME: This is the sort of code that should eventually live in a Core
39// checker rather than as a special case in ExprEngine.
40void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
41 const CallEvent &Call) {
42 SVal ThisVal;
43 bool AlwaysReturnsLValue;
44 const CXXRecordDecl *ThisRD = nullptr;
45 if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
46 assert(Ctor->getDecl()->isTrivial());
47 assert(Ctor->getDecl()->isCopyOrMoveConstructor());
48 ThisVal = Ctor->getCXXThisVal();
49 ThisRD = Ctor->getDecl()->getParent();
50 AlwaysReturnsLValue = false;
51 } else {
52 assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
53 assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
54 OO_Equal);
55 ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
56 ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
57 AlwaysReturnsLValue = true;
58 }
59
60 assert(ThisRD);
61 if (ThisRD->isEmpty()) {
62 // Do nothing for empty classes. Otherwise it'd retrieve an UnknownVal
63 // and bind it and RegionStore would think that the actual value
64 // in this region at this offset is unknown.
65 return;
66 }
67
68 const LocationContext *LCtx = Pred->getLocationContext();
69
70 ExplodedNodeSet Dst;
71 Bldr.takeNodes(Pred);
72
73 SVal V = Call.getArgSVal(0);
74
75 // If the value being copied is not unknown, load from its location to get
76 // an aggregate rvalue.
77 if (Optional<Loc> L = V.getAs<Loc>())
78 V = Pred->getState()->getSVal(*L);
79 else
80 assert(V.isUnknownOrUndef());
81
82 const Expr *CallExpr = Call.getOriginExpr();
83 evalBind(Dst, CallExpr, Pred, ThisVal, V, true);
84
85 PostStmt PS(CallExpr, LCtx);
86 for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end();
87 I != E; ++I) {
88 ProgramStateRef State = (*I)->getState();
89 if (AlwaysReturnsLValue)
90 State = State->BindExpr(CallExpr, LCtx, ThisVal);
91 else
92 State = bindReturnValue(Call, LCtx, State);
93 Bldr.generateNode(PS, State, *I);
94 }
95}
96
97
98SVal ExprEngine::makeZeroElementRegion(ProgramStateRef State, SVal LValue,
99 QualType &Ty, bool &IsArray) {
100 SValBuilder &SVB = State->getStateManager().getSValBuilder();
101 ASTContext &Ctx = SVB.getContext();
102
103 while (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
104 Ty = AT->getElementType();
105 LValue = State->getLValue(Ty, SVB.makeZeroArrayIndex(), LValue);
106 IsArray = true;
107 }
108
109 return LValue;
110}
111
112SVal ExprEngine::computeObjectUnderConstruction(
113 const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
114 const ConstructionContext *CC, EvalCallOptions &CallOpts) {
115 SValBuilder &SVB = getSValBuilder();
116 MemRegionManager &MRMgr = SVB.getRegionManager();
117 ASTContext &ACtx = SVB.getContext();
118
119 // Compute the target region by exploring the construction context.
120 if (CC) {
121 switch (CC->getKind()) {
122 case ConstructionContext::CXX17ElidedCopyVariableKind:
123 case ConstructionContext::SimpleVariableKind: {
124 const auto *DSCC = cast<VariableConstructionContext>(CC);
125 const auto *DS = DSCC->getDeclStmt();
126 const auto *Var = cast<VarDecl>(DS->getSingleDecl());
127 QualType Ty = Var->getType();
128 return makeZeroElementRegion(State, State->getLValue(Var, LCtx), Ty,
129 CallOpts.IsArrayCtorOrDtor);
130 }
131 case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
132 case ConstructionContext::SimpleConstructorInitializerKind: {
133 const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
134 const auto *Init = ICC->getCXXCtorInitializer();
135 const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
136 Loc ThisPtr = SVB.getCXXThis(CurCtor, LCtx->getStackFrame());
137 SVal ThisVal = State->getSVal(ThisPtr);
138 if (Init->isBaseInitializer()) {
139 const auto *ThisReg = cast<SubRegion>(ThisVal.getAsRegion());
140 const CXXRecordDecl *BaseClass =
141 Init->getBaseClass()->getAsCXXRecordDecl();
142 const auto *BaseReg =
143 MRMgr.getCXXBaseObjectRegion(BaseClass, ThisReg,
144 Init->isBaseVirtual());
145 return SVB.makeLoc(BaseReg);
146 }
147 if (Init->isDelegatingInitializer())
148 return ThisVal;
149
150 const ValueDecl *Field;
151 SVal FieldVal;
152 if (Init->isIndirectMemberInitializer()) {
153 Field = Init->getIndirectMember();
154 FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal);
155 } else {
156 Field = Init->getMember();
157 FieldVal = State->getLValue(Init->getMember(), ThisVal);
158 }
159
160 QualType Ty = Field->getType();
161 return makeZeroElementRegion(State, FieldVal, Ty,
162 CallOpts.IsArrayCtorOrDtor);
163 }
164 case ConstructionContext::NewAllocatedObjectKind: {
165 if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
166 const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC);
167 const auto *NE = NECC->getCXXNewExpr();
168 SVal V = *getObjectUnderConstruction(State, NE, LCtx);
169 if (const SubRegion *MR =
170 dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
171 if (NE->isArray()) {
172 // TODO: In fact, we need to call the constructor for every
173 // allocated element, not just the first one!
174 CallOpts.IsArrayCtorOrDtor = true;
175 return loc::MemRegionVal(getStoreManager().GetElementZeroRegion(
176 MR, NE->getType()->getPointeeType()));
177 }
178 return V;
179 }
180 // TODO: Detect when the allocator returns a null pointer.
181 // Constructor shall not be called in this case.
182 }
183 break;
184 }
185 case ConstructionContext::SimpleReturnedValueKind:
186 case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
187 // The temporary is to be managed by the parent stack frame.
188 // So build it in the parent stack frame if we're not in the
189 // top frame of the analysis.
190 const StackFrameContext *SFC = LCtx->getStackFrame();
191 if (const LocationContext *CallerLCtx = SFC->getParent()) {
192 auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
193 .getAs<CFGCXXRecordTypedCall>();
194 if (!RTC) {
195 // We were unable to find the correct construction context for the
196 // call in the parent stack frame. This is equivalent to not being
197 // able to find construction context at all.
198 break;
199 }
200 if (isa<BlockInvocationContext>(CallerLCtx)) {
201 // Unwrap block invocation contexts. They're mostly part of
202 // the current stack frame.
203 CallerLCtx = CallerLCtx->getParent();
204 assert(!isa<BlockInvocationContext>(CallerLCtx));
205 }
206 return computeObjectUnderConstruction(
207 cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
208 RTC->getConstructionContext(), CallOpts);
209 } else {
210 // We are on the top frame of the analysis. We do not know where the
211 // object is returned to. Conjure a symbolic region for the return value.
212 // TODO: We probably need a new MemRegion kind to represent the storage
213 // of that SymbolicRegion, so that we could produce a fancy symbol
214 // instead of an anonymous conjured symbol.
215 // TODO: Do we need to track the region to avoid having it dead
216 // too early? It does die too early, at least in C++17, but because
217 // putting anything into a SymbolicRegion causes an immediate escape,
218 // it doesn't cause any leak false positives.
219 const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
220 // Make sure that this doesn't coincide with any other symbol
221 // conjured for the returned expression.
222 static const int TopLevelSymRegionTag = 0;
223 const Expr *RetE = RCC->getReturnStmt()->getRetValue();
224 assert(RetE && "Void returns should not have a construction context");
225 QualType ReturnTy = RetE->getType();
226 QualType RegionTy = ACtx.getPointerType(ReturnTy);
227 return SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC, RegionTy,
228 currBldrCtx->blockCount());
229 }
230 llvm_unreachable("Unhandled return value construction context!");
231 }
232 case ConstructionContext::ElidedTemporaryObjectKind: {
233 assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
234 const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
235
236 // Support pre-C++17 copy elision. We'll have the elidable copy
237 // constructor in the AST and in the CFG, but we'll skip it
238 // and construct directly into the final object. This call
239 // also sets the CallOpts flags for us.
240 // If the elided copy/move constructor is not supported, there's still
241 // benefit in trying to model the non-elided constructor.
242 // Stash our state before trying to elide, as it'll get overwritten.
243 ProgramStateRef PreElideState = State;
244 EvalCallOptions PreElideCallOpts = CallOpts;
245
246 SVal V = computeObjectUnderConstruction(
247 TCC->getConstructorAfterElision(), State, LCtx,
248 TCC->getConstructionContextAfterElision(), CallOpts);
249
250 // FIXME: This definition of "copy elision has not failed" is unreliable.
251 // It doesn't indicate that the constructor will actually be inlined
252 // later; this is still up to evalCall() to decide.
253 if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
254 return V;
255
256 // Copy elision failed. Revert the changes and proceed as if we have
257 // a simple temporary.
258 CallOpts = PreElideCallOpts;
259 CallOpts.IsElidableCtorThatHasNotBeenElided = true;
260 LLVM_FALLTHROUGH;
261 }
262 case ConstructionContext::SimpleTemporaryObjectKind: {
263 const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
264 const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();
265
266 CallOpts.IsTemporaryCtorOrDtor = true;
267 if (MTE) {
268 if (const ValueDecl *VD = MTE->getExtendingDecl()) {
269 assert(MTE->getStorageDuration() != SD_FullExpression);
270 if (!VD->getType()->isReferenceType()) {
271 // We're lifetime-extended by a surrounding aggregate.
272 // Automatic destructors aren't quite working in this case
273 // on the CFG side. We should warn the caller about that.
274 // FIXME: Is there a better way to retrieve this information from
275 // the MaterializeTemporaryExpr?
276 CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true;
277 }
278 }
279
280 if (MTE->getStorageDuration() == SD_Static ||
281 MTE->getStorageDuration() == SD_Thread)
282 return loc::MemRegionVal(MRMgr.getCXXStaticTempObjectRegion(E));
283 }
284
285 return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
286 }
287 case ConstructionContext::ArgumentKind: {
288 // Arguments are technically temporaries.
289 CallOpts.IsTemporaryCtorOrDtor = true;
290
291 const auto *ACC = cast<ArgumentConstructionContext>(CC);
292 const Expr *E = ACC->getCallLikeExpr();
293 unsigned Idx = ACC->getIndex();
294
295 CallEventManager &CEMgr = getStateManager().getCallEventManager();
296 auto getArgLoc = [&](CallEventRef<> Caller) -> Optional<SVal> {
297 const LocationContext *FutureSFC =
298 Caller->getCalleeStackFrame(currBldrCtx->blockCount());
299 // Return early if we are unable to reliably foresee
300 // the future stack frame.
301 if (!FutureSFC)
302 return None;
303
304 // This should be equivalent to Caller->getDecl() for now, but
305 // FutureSFC->getDecl() is likely to support better stuff (like
306 // virtual functions) earlier.
307 const Decl *CalleeD = FutureSFC->getDecl();
308
309 // FIXME: Support for variadic arguments is not implemented here yet.
310 if (CallEvent::isVariadic(CalleeD))
311 return None;
312
313 // Operator arguments do not correspond to operator parameters
314 // because this-argument is implemented as a normal argument in
315 // operator call expressions but not in operator declarations.
316 const TypedValueRegion *TVR = Caller->getParameterLocation(
317 *Caller->getAdjustedParameterIndex(Idx), currBldrCtx->blockCount());
318 if (!TVR)
319 return None;
320
321 return loc::MemRegionVal(TVR);
322 };
323
324 if (const auto *CE = dyn_cast<CallExpr>(E)) {
325 CallEventRef<> Caller = CEMgr.getSimpleCall(CE, State, LCtx);
326 if (Optional<SVal> V = getArgLoc(Caller))
327 return *V;
328 else
329 break;
330 } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) {
331 // Don't bother figuring out the target region for the future
332 // constructor because we won't need it.
333 CallEventRef<> Caller =
334 CEMgr.getCXXConstructorCall(CCE, /*Target=*/nullptr, State, LCtx);
335 if (Optional<SVal> V = getArgLoc(Caller))
336 return *V;
337 else
338 break;
339 } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) {
340 CallEventRef<> Caller = CEMgr.getObjCMethodCall(ME, State, LCtx);
341 if (Optional<SVal> V = getArgLoc(Caller))
342 return *V;
343 else
344 break;
345 }
346 }
347 } // switch (CC->getKind())
348 }
349
350 // If we couldn't find an existing region to construct into, assume we're
351 // constructing a temporary. Notify the caller of our failure.
352 CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
353 return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
354}
355
356ProgramStateRef ExprEngine::updateObjectsUnderConstruction(
357 SVal V, const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
358 const ConstructionContext *CC, const EvalCallOptions &CallOpts) {
359 if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) {
360 // Sounds like we failed to find the target region and therefore
361 // copy elision failed. There's nothing we can do about it here.
362 return State;
363 }
364
365 // See if we're constructing an existing region by looking at the
366 // current construction context.
367 assert(CC && "Computed target region without construction context?");
368 switch (CC->getKind()) {
369 case ConstructionContext::CXX17ElidedCopyVariableKind:
370 case ConstructionContext::SimpleVariableKind: {
371 const auto *DSCC = cast<VariableConstructionContext>(CC);
372 return addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, V);
373 }
374 case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
375 case ConstructionContext::SimpleConstructorInitializerKind: {
376 const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
377 const auto *Init = ICC->getCXXCtorInitializer();
378 // Base and delegating initializers handled above
379 assert(Init->isAnyMemberInitializer() &&
380 "Base and delegating initializers should have been handled by"
381 "computeObjectUnderConstruction()");
382 return addObjectUnderConstruction(State, Init, LCtx, V);
383 }
384 case ConstructionContext::NewAllocatedObjectKind: {
385 return State;
386 }
387 case ConstructionContext::SimpleReturnedValueKind:
388 case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
389 const StackFrameContext *SFC = LCtx->getStackFrame();
390 const LocationContext *CallerLCtx = SFC->getParent();
391 if (!CallerLCtx) {
392 // No extra work is necessary in top frame.
393 return State;
394 }
395
396 auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
397 .getAs<CFGCXXRecordTypedCall>();
398 assert(RTC && "Could not have had a target region without it");
399 if (isa<BlockInvocationContext>(CallerLCtx)) {
400 // Unwrap block invocation contexts. They're mostly part of
401 // the current stack frame.
402 CallerLCtx = CallerLCtx->getParent();
403 assert(!isa<BlockInvocationContext>(CallerLCtx));
404 }
405
406 return updateObjectsUnderConstruction(V,
407 cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
408 RTC->getConstructionContext(), CallOpts);
409 }
410 case ConstructionContext::ElidedTemporaryObjectKind: {
411 assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
412 if (!CallOpts.IsElidableCtorThatHasNotBeenElided) {
413 const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
414 State = updateObjectsUnderConstruction(
415 V, TCC->getConstructorAfterElision(), State, LCtx,
416 TCC->getConstructionContextAfterElision(), CallOpts);
417
418 // Remember that we've elided the constructor.
419 State = addObjectUnderConstruction(
420 State, TCC->getConstructorAfterElision(), LCtx, V);
421
422 // Remember that we've elided the destructor.
423 if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
424 State = elideDestructor(State, BTE, LCtx);
425
426 // Instead of materialization, shamelessly return
427 // the final object destination.
428 if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
429 State = addObjectUnderConstruction(State, MTE, LCtx, V);
430
431 return State;
432 }
433 // If we decided not to elide the constructor, proceed as if
434 // it's a simple temporary.
435 LLVM_FALLTHROUGH;
436 }
437 case ConstructionContext::SimpleTemporaryObjectKind: {
438 const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
439 if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
440 State = addObjectUnderConstruction(State, BTE, LCtx, V);
441
442 if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
443 State = addObjectUnderConstruction(State, MTE, LCtx, V);
444
445 return State;
446 }
447 case ConstructionContext::ArgumentKind: {
448 const auto *ACC = cast<ArgumentConstructionContext>(CC);
449 if (const auto *BTE = ACC->getCXXBindTemporaryExpr())
450 State = addObjectUnderConstruction(State, BTE, LCtx, V);
451
452 return addObjectUnderConstruction(
453 State, {ACC->getCallLikeExpr(), ACC->getIndex()}, LCtx, V);
454 }
455 }
456 llvm_unreachable("Unhandled construction context!");
457}
458
459void ExprEngine::handleConstructor(const Expr *E,
460 ExplodedNode *Pred,
461 ExplodedNodeSet &destNodes) {
462 const auto *CE = dyn_cast<CXXConstructExpr>(E);
    [2] Assuming 'E' is not a 'CXXConstructExpr'
463 const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(E);
    [3] Assuming 'E' is not a 'CXXInheritedCtorInitExpr'
    [4] 'CIE' initialized to a null pointer value
464 assert(CE || CIE);
465
466 const LocationContext *LCtx = Pred->getLocationContext();
467 ProgramStateRef State = Pred->getState();
468
469 SVal Target = UnknownVal();
470
471 if (CE) {
    [4.1] 'CE' is null
    [5] Taking false branch
472 if (Optional<SVal> ElidedTarget =
473 getObjectUnderConstruction(State, CE, LCtx)) {
474 // We've previously modeled an elidable constructor by pretending that it
475 // in fact constructs into the correct target. This constructor can
476 // therefore be skipped.
477 Target = *ElidedTarget;
478 StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
479 State = finishObjectConstruction(State, CE, LCtx);
480 if (auto L = Target.getAs<Loc>())
481 State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType()));
482 Bldr.generateNode(CE, Pred, State);
483 return;
484 }
485 }
486
487 // FIXME: Handle arrays, which run the same constructor for every element.
488 // For now, we just run the first constructor (which should still invalidate
489 // the entire array).
490
491 EvalCallOptions CallOpts;
492 auto C = getCurrentCFGElement().getAs<CFGConstructor>();
493 assert(C || getCurrentCFGElement().getAs<CFGStmt>());
494 const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;
    [6] Assuming the condition is false
    [7] '?' condition is false
495
496 const CXXConstructExpr::ConstructionKind CK =
497 CE ? CE->getConstructionKind() : CIE->getConstructionKind();
    [7.1] 'CE' is null
    [8] '?' condition is false
    [9] Called C++ object pointer is null  (the reported warning: line 497, column 40)
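// Note (not part of the original file): one possible restructuring, sketched
// here only to show how the CE/CIE invariant could be made explicit on every
// path instead of relying on the assert above, which -D NDEBUG removes:
//
//   CXXConstructExpr::ConstructionKind CK;
//   if (CE)
//     CK = CE->getConstructionKind();
//   else if (CIE)
//     CK = CIE->getConstructionKind();
//   else
//     llvm_unreachable("handleConstructor called without a construct expression");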
498 switch (CK) {
499 case CXXConstructExpr::CK_Complete: {
500 // Inherited constructors are always base class constructors.
501 assert(CE && !CIE && "A complete constructor is inherited?!");
502
503 // The target region is found from construction context.
504 std::tie(State, Target) =
505 handleConstructionContext(CE, State, LCtx, CC, CallOpts);
506 break;
507 }
508 case CXXConstructExpr::CK_VirtualBase: {
509 // Make sure we are not calling virtual base class initializers twice.
510 // Only the most-derived object should initialize virtual base classes.
511 const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>(
512 LCtx->getStackFrame()->getCallSite());
513 assert(
514 (!OuterCtor ||
515 OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Complete ||
516 OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Delegating) &&
517 ("This virtual base should have already been initialized by "
518 "the most derived class!"));
519 (void)OuterCtor;
520 LLVM_FALLTHROUGH;
521 }
522 case CXXConstructExpr::CK_NonVirtualBase:
523 // In C++17, classes with non-virtual bases may be aggregates, so they would
524 // be initialized as aggregates without a constructor call, so we may have
525 // a base class constructed directly into an initializer list without
526 // having the derived-class constructor call on the previous stack frame.
527 // Initializer lists may be nested into more initializer lists that
528 // correspond to surrounding aggregate initializations.
529 // FIXME: For now this code essentially bails out. We need to find the
530 // correct target region and set it.
531 // FIXME: Instead of relying on the ParentMap, we should have the
532 // trigger-statement (InitListExpr in this case) passed down from CFG or
533 // otherwise always available during construction.
534 if (dyn_cast_or_null<InitListExpr>(LCtx->getParentMap().getParent(E))) {
535 MemRegionManager &MRMgr = getSValBuilder().getRegionManager();
536 Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
537 CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
538 break;
539 }
540 LLVM_FALLTHROUGH;
541 case CXXConstructExpr::CK_Delegating: {
542 const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
543 Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor,
544 LCtx->getStackFrame());
545 SVal ThisVal = State->getSVal(ThisPtr);
546
547 if (CK == CXXConstructExpr::CK_Delegating) {
548 Target = ThisVal;
549 } else {
550 // Cast to the base type.
551 bool IsVirtual = (CK == CXXConstructExpr::CK_VirtualBase);
552 SVal BaseVal =
553 getStoreManager().evalDerivedToBase(ThisVal, E->getType(), IsVirtual);
554 Target = BaseVal;
555 }
556 break;
557 }
558 }
559
560 if (State != Pred->getState()) {
561 static SimpleProgramPointTag T("ExprEngine",
562 "Prepare for object construction");
563 ExplodedNodeSet DstPrepare;
564 StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx);
565 BldrPrepare.generateNode(E, Pred, State, &T, ProgramPoint::PreStmtKind);
566 assert(DstPrepare.size() <= 1);
567 if (DstPrepare.size() == 0)
568 return;
569 Pred = *BldrPrepare.begin();
570 }
571
572 const MemRegion *TargetRegion = Target.getAsRegion();
573 CallEventManager &CEMgr = getStateManager().getCallEventManager();
574 CallEventRef<> Call =
575 CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall(
576 CIE, TargetRegion, State, LCtx)
577 : (CallEventRef<>)CEMgr.getCXXConstructorCall(
578 CE, TargetRegion, State, LCtx);
579
580 ExplodedNodeSet DstPreVisit;
581 getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, E, *this);
582
583 ExplodedNodeSet PreInitialized;
584 if (CE) {
585 // FIXME: Is it possible and/or useful to do this before PreStmt?
586 StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
587 for (ExplodedNodeSet::iterator I = DstPreVisit.begin(),
588 E = DstPreVisit.end();
589 I != E; ++I) {
590 ProgramStateRef State = (*I)->getState();
591 if (CE->requiresZeroInitialization()) {
592 // FIXME: Once we properly handle constructors in new-expressions, we'll
593 // need to invalidate the region before setting a default value, to make
594 // sure there aren't any lingering bindings around. This probably needs
595 // to happen regardless of whether or not the object is zero-initialized
596 // to handle random fields of a placement-initialized object picking up
597 // old bindings. We might only want to do it when we need to, though.
598 // FIXME: This isn't actually correct for arrays -- we need to zero-
599 // initialize the entire array, not just the first element -- but our
600 // handling of arrays everywhere else is weak as well, so this shouldn't
601 // actually make things worse. Placement new makes this tricky as well,
602 // since it's then possible to be initializing one part of a multi-
603 // dimensional array.
604 State = State->bindDefaultZero(Target, LCtx);
605 }
606
607 Bldr.generateNode(CE, *I, State, /*tag=*/nullptr,
608 ProgramPoint::PreStmtKind);
609 }
610 } else {
611 PreInitialized = DstPreVisit;
612 }
613
614 ExplodedNodeSet DstPreCall;
615 getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
616 *Call, *this);
617
618 ExplodedNodeSet DstEvaluated;
619
620 if (CE && CE->getConstructor()->isTrivial() &&
621 CE->getConstructor()->isCopyOrMoveConstructor() &&
622 !CallOpts.IsArrayCtorOrDtor) {
623 StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);
624 // FIXME: Handle other kinds of trivial constructors as well.
625 for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
626 I != E; ++I)
627 performTrivialCopy(Bldr, *I, *Call);
628
629 } else {
630 for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
631 I != E; ++I)
632 getCheckerManager().runCheckersForEvalCall(DstEvaluated, *I, *Call, *this,
633 CallOpts);
634 }
635
636 // If the CFG was constructed without elements for temporary destructors
637 // and the just-called constructor created a temporary object then
638 // stop exploration if the temporary object has a noreturn constructor.
639 // This can lose coverage because the destructor, if it were present
640 // in the CFG, would be called at the end of the full expression or
641 // later (for life-time extended temporaries) -- but avoids infeasible
642 // paths when no-return temporary destructors are used for assertions.
643 ExplodedNodeSet DstEvaluatedPostProcessed;
644 StmtNodeBuilder Bldr(DstEvaluated, DstEvaluatedPostProcessed, *currBldrCtx);
645 const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
646 if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
647 if (llvm::isa_and_nonnull<CXXTempObjectRegion>(TargetRegion) &&
648 cast<CXXConstructorDecl>(Call->getDecl())
649 ->getParent()
650 ->isAnyDestructorNoReturn()) {
651
652 // If we've inlined the constructor, then DstEvaluated would be empty.
653 // In this case we still want a sink, which could be implemented
654 // in processCallExit. But we don't have that implemented at the moment,
655 // so if you hit this assertion, see if you can avoid inlining
656 // the respective constructor when analyzer-config cfg-temporary-dtors
657 // is set to false.
658 // Otherwise there's nothing wrong with inlining such constructor.
659 assert(!DstEvaluated.empty() &&
660 "We should not have inlined this constructor!");
661
662 for (ExplodedNode *N : DstEvaluated) {
663 Bldr.generateSink(E, N, N->getState());
664 }
665
666 // There is no need to run the PostCall and PostStmt checker
667 // callbacks because we just generated sinks on all nodes in the
668 // frontier.
669 return;
670 }
671 }
672
673 ExplodedNodeSet DstPostArgumentCleanup;
674 for (ExplodedNode *I : DstEvaluatedPostProcessed)
675 finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);
676
677 // If there were other constructors called for object-type arguments
678 // of this constructor, clean them up.
679 ExplodedNodeSet DstPostCall;
680 getCheckerManager().runCheckersForPostCall(DstPostCall,
681 DstPostArgumentCleanup,
682 *Call, *this);
683 getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, E, *this);
684}
685
686void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
687 ExplodedNode *Pred,
688 ExplodedNodeSet &Dst) {
689 handleConstructor(CE, Pred, Dst);
690}
691
692void ExprEngine::VisitCXXInheritedCtorInitExpr(
693 const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred,
694 ExplodedNodeSet &Dst) {
695 handleConstructor(CE, Pred, Dst);
    [1] Calling 'ExprEngine::handleConstructor'
696}
697
698void ExprEngine::VisitCXXDestructor(QualType ObjectType,
699 const MemRegion *Dest,
700 const Stmt *S,
701 bool IsBaseDtor,
702 ExplodedNode *Pred,
703 ExplodedNodeSet &Dst,
704 EvalCallOptions &CallOpts) {
705 assert(S && "A destructor without a trigger!");
706 const LocationContext *LCtx = Pred->getLocationContext();
707 ProgramStateRef State = Pred->getState();
708
709 const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
710 assert(RecordDecl && "Only CXXRecordDecls should have destructors");
711 const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();
712 // FIXME: There should always be a Decl, otherwise the destructor call
713 // shouldn't have been added to the CFG in the first place.
714 if (!DtorDecl) {
715 // Skip the invalid destructor. We cannot simply return because
716 // it would interrupt the analysis instead.
717 static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
718 // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway.
719 PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx, &T);
720 NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
721 Bldr.generateNode(PP, Pred->getState(), Pred);
722 return;
723 }
724
725 if (!Dest) {
726 // We're trying to destroy something that is not a region. This may happen
727 // for a variety of reasons (unknown target region, concrete integer instead
728 // of target region, etc.). The current code makes an attempt to recover.
729 // FIXME: We probably don't really need to recover when we're dealing
730 // with concrete integers specifically.
731 CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
732 if (const Expr *E = dyn_cast_or_null<Expr>(S)) {
733 Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext());
734 } else {
735 static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
736 NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
737 Bldr.generateSink(Pred->getLocation().withTag(&T),
738 Pred->getState(), Pred);
739 return;
740 }
741 }
742
743 CallEventManager &CEMgr = getStateManager().getCallEventManager();
744 CallEventRef<CXXDestructorCall> Call =
745 CEMgr.getCXXDestructorCall(DtorDecl, S, Dest, IsBaseDtor, State, LCtx);
746
747 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
748 Call->getSourceRange().getBegin(),
749 "Error evaluating destructor");
750
751 ExplodedNodeSet DstPreCall;
752 getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
753 *Call, *this);
754
755 ExplodedNodeSet DstInvalidated;
756 StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx);
757 for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
758 I != E; ++I)
759 defaultEvalCall(Bldr, *I, *Call, CallOpts);
760
761 getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated,
762 *Call, *this);
763}
764
765void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
766 ExplodedNode *Pred,
767 ExplodedNodeSet &Dst) {
768 ProgramStateRef State = Pred->getState();
769 const LocationContext *LCtx = Pred->getLocationContext();
770 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
771 CNE->getBeginLoc(),
772 "Error evaluating New Allocator Call");
773 CallEventManager &CEMgr = getStateManager().getCallEventManager();
774 CallEventRef<CXXAllocatorCall> Call =
775 CEMgr.getCXXAllocatorCall(CNE, State, LCtx);
776
777 ExplodedNodeSet DstPreCall;
778 getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
779 *Call, *this);
780
781 ExplodedNodeSet DstPostCall;
782 StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx);
783 for (ExplodedNode *I : DstPreCall) {
784 // FIXME: Provide evalCall for checkers?
785 defaultEvalCall(CallBldr, I, *Call);
786 }
787 // If the call is inlined, DstPostCall will be empty and we bail out now.
788
789 // Store return value of operator new() for future use, until the actual
790 // CXXNewExpr gets processed.
791 ExplodedNodeSet DstPostValue;
792 StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx);
793 for (ExplodedNode *I : DstPostCall) {
794 // FIXME: Because CNE serves as the "call site" for the allocator (due to
795 // lack of a better expression in the AST), the conjured return value symbol
796 // is going to be of the same type (C++ object pointer type). Technically
797 // this is not correct because the operator new's prototype always says that
798 // it returns a 'void *'. So we should change the type of the symbol,
799 // and then evaluate the cast over the symbolic pointer from 'void *' to
800 // the object pointer type. But without changing the symbol's type it
801 // is breaking too much to evaluate the no-op symbolic cast over it, so we
802 // skip it for now.
803 ProgramStateRef State = I->getState();
804 SVal RetVal = State->getSVal(CNE, LCtx);
805
806 // If this allocation function is not declared as non-throwing, failures
807 // /must/ be signalled by exceptions, and thus the return value will never
808 // be NULL. -fno-exceptions does not influence this semantics.
809 // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
810 // where new can return NULL. If we end up supporting that option, we can
811 // consider adding a check for it here.
812 // C++11 [basic.stc.dynamic.allocation]p3.
813 if (const FunctionDecl *FD = CNE->getOperatorNew()) {
814 QualType Ty = FD->getType();
815 if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
816 if (!ProtoType->isNothrow())
817 State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true);
818 }
819
820 ValueBldr.generateNode(
821 CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal));
822 }
823
824 ExplodedNodeSet DstPostPostCallCallback;
825 getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
826 DstPostValue, *Call, *this);
827 for (ExplodedNode *I : DstPostPostCallCallback) {
828 getCheckerManager().runCheckersForNewAllocator(*Call, Dst, I, *this);
829 }
830}
831
832void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
833 ExplodedNodeSet &Dst) {
834 // FIXME: Much of this should eventually migrate to CXXAllocatorCall.
835 // Also, we need to decide how allocators actually work -- they're not
836 // really part of the CXXNewExpr because they happen BEFORE the
837 // CXXConstructExpr subexpression. See PR12014 for some discussion.
838
839 unsigned blockCount = currBldrCtx->blockCount();
840 const LocationContext *LCtx = Pred->getLocationContext();
841 SVal symVal = UnknownVal();
842 FunctionDecl *FD = CNE->getOperatorNew();
843
844 bool IsStandardGlobalOpNewFunction =
845 FD->isReplaceableGlobalAllocationFunction();
846
847 ProgramStateRef State = Pred->getState();
848
849 // Retrieve the stored operator new() return value.
850 if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
851 symVal = *getObjectUnderConstruction(State, CNE, LCtx);
852 State = finishObjectConstruction(State, CNE, LCtx);
853 }
854
855 // We assume all standard global 'operator new' functions allocate memory in
856 // heap. We realize this is an approximation that might not correctly model
857 // a custom global allocator.
858 if (symVal.isUnknown()) {
859 if (IsStandardGlobalOpNewFunction)
860 symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount);
861 else
862 symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(),
863 blockCount);
864 }
865
866 CallEventManager &CEMgr = getStateManager().getCallEventManager();
867 CallEventRef<CXXAllocatorCall> Call =
868 CEMgr.getCXXAllocatorCall(CNE, State, LCtx);
869
870 if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
871 // Invalidate placement args.
872 // FIXME: Once we figure out how we want allocators to work,
873 // we should be using the usual pre-/(default-)eval-/post-call checkers
874 // here.
875 State = Call->invalidateRegions(blockCount);
876 if (!State)
877 return;
878
879 // If this allocation function is not declared as non-throwing, failures
880 // /must/ be signalled by exceptions, and thus the return value will never
881 // be NULL. -fno-exceptions does not influence this semantics.
882 // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
883 // where new can return NULL. If we end up supporting that option, we can
884 // consider adding a check for it here.
885 // C++11 [basic.stc.dynamic.allocation]p3.
886 if (FD) {
887 QualType Ty = FD->getType();
888 if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
889 if (!ProtoType->isNothrow())
890 if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>())
891 State = State->assume(*dSymVal, true);
892 }
893 }
894
895 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
896
897 SVal Result = symVal;
898
899 if (CNE->isArray()) {
900 // FIXME: allocating an array requires simulating the constructors.
901 // For now, just return a symbolicated region.
902 if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) {
903 QualType ObjTy = CNE->getType()->getPointeeType();
904 const ElementRegion *EleReg =
905 getStoreManager().GetElementZeroRegion(NewReg, ObjTy);
906 Result = loc::MemRegionVal(EleReg);
907 }
908 State = State->BindExpr(CNE, Pred->getLocationContext(), Result);
909 Bldr.generateNode(CNE, Pred, State);
910 return;
911 }
912
913 // FIXME: Once we have proper support for CXXConstructExprs inside
914 // CXXNewExpr, we need to make sure that the constructed object is not
915 // immediately invalidated here. (The placement call should happen before
916 // the constructor call anyway.)
917 if (FD && FD->isReservedGlobalPlacementOperator()) {
918 // Non-array placement new should always return the placement location.
919 SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx);
920 Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(),
921 CNE->getPlacementArg(0)->getType());
922 }
923
924 // Bind the address of the object, then check to see if we cached out.
925 State = State->BindExpr(CNE, LCtx, Result);
926 ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State);
927 if (!NewN)
928 return;
929
930 // If the type is not a record, we won't have a CXXConstructExpr as an
931 // initializer. Copy the value over.
932 if (const Expr *Init = CNE->getInitializer()) {
933 if (!isa<CXXConstructExpr>(Init)) {
934 assert(Bldr.getResults().size() == 1);
935 Bldr.takeNodes(NewN);
936 evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx),
937 /*FirstInit=*/IsStandardGlobalOpNewFunction);
938 }
939 }
940}
941
942void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE,
943 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
944
945 CallEventManager &CEMgr = getStateManager().getCallEventManager();
946 CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall(
947 CDE, Pred->getState(), Pred->getLocationContext());
948
949 ExplodedNodeSet DstPreCall;
950 getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, *Call, *this);
951
952 getCheckerManager().runCheckersForPostCall(Dst, DstPreCall, *Call, *this);
953}
954
955void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred,
956 ExplodedNodeSet &Dst) {
957 const VarDecl *VD = CS->getExceptionDecl();
958 if (!VD) {
959 Dst.Add(Pred);
960 return;
961 }
962
963 const LocationContext *LCtx = Pred->getLocationContext();
964 SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(),
965 currBldrCtx->blockCount());
966 ProgramStateRef state = Pred->getState();
967 state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx);
968
969 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
970 Bldr.generateNode(CS, Pred, state);
971}
972
973void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred,
974 ExplodedNodeSet &Dst) {
975 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
976
977 // Get the this object region from StoreManager.
978 const LocationContext *LCtx = Pred->getLocationContext();
979 const MemRegion *R =
980 svalBuilder.getRegionManager().getCXXThisRegion(
981 getContext().getCanonicalType(TE->getType()),
982 LCtx);
983
984 ProgramStateRef state = Pred->getState();
985 SVal V = state->getSVal(loc::MemRegionVal(R));
986 Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V));
987}
988
989void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred,
990 ExplodedNodeSet &Dst) {
991 const LocationContext *LocCtxt = Pred->getLocationContext();
992
993 // Get the region of the lambda itself.
994 const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion(
995 LE, LocCtxt);
996 SVal V = loc::MemRegionVal(R);
997
998 ProgramStateRef State = Pred->getState();
999
1000 // If we created a new MemRegion for the lambda, we should explicitly bind
1001 // the captures.
1002 CXXRecordDecl::field_iterator CurField = LE->getLambdaClass()->field_begin();
1003 for (LambdaExpr::const_capture_init_iterator i = LE->capture_init_begin(),
1004 e = LE->capture_init_end();
1005 i != e; ++i, ++CurField) {
1006 FieldDecl *FieldForCapture = *CurField;
1007 SVal FieldLoc = State->getLValue(FieldForCapture, V);
1008
1009 SVal InitVal;
1010 if (!FieldForCapture->hasCapturedVLAType()) {
1011 Expr *InitExpr = *i;
1012 assert(InitExpr && "Capture missing initialization expression");
1013 InitVal = State->getSVal(InitExpr, LocCtxt);
1014 } else {
1015 // The field stores the length of a captured variable-length array.
1016 // These captures don't have initialization expressions; instead we
1017 // get the length from the VLAType size expression.
1018 Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
1019 InitVal = State->getSVal(SizeExpr, LocCtxt);
1020 }
1021
1022 State = State->bindLoc(FieldLoc, InitVal, LocCtxt);
1023 }
1024
1025 // Decay the Loc into an RValue, because there might be a
1026 // MaterializeTemporaryExpr node above this one which expects the bound value
1027 // to be an RValue.
1028 SVal LambdaRVal = State->getSVal(R);
1029
1030 ExplodedNodeSet Tmp;
1031 StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
1032 // FIXME: is this the right program point kind?
1033 Bldr.generateNode(LE, Pred,
1034 State->BindExpr(LE, LocCtxt, LambdaRVal),
1035 nullptr, ProgramPoint::PostLValueKind);
1036
1037 // FIXME: Move all post/pre visits to ::Visit().
1038 getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this);
1039}