1 | //===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===// |
2 | // |
3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | // See https://llvm.org/LICENSE.txt for license information. |
5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | // |
7 | //===----------------------------------------------------------------------===// |
8 | // |
9 | // This file defines the C++ expression evaluation engine. |
10 | // |
11 | //===----------------------------------------------------------------------===// |
12 | |
13 | #include "clang/AST/DeclCXX.h" |
14 | #include "clang/AST/ParentMap.h" |
15 | #include "clang/AST/StmtCXX.h" |
16 | #include "clang/Analysis/ConstructionContext.h" |
17 | #include "clang/Basic/PrettyStackTrace.h" |
18 | #include "clang/StaticAnalyzer/Core/CheckerManager.h" |
19 | #include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h" |
20 | #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h" |
21 | #include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h" |
22 | #include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h" |
23 | #include "llvm/ADT/STLExtras.h" |
24 | #include "llvm/ADT/Sequence.h" |
25 | #include <optional> |
26 | |
27 | using namespace clang; |
28 | using namespace ento; |
29 | |
30 | void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME, |
31 | ExplodedNode *Pred, |
32 | ExplodedNodeSet &Dst) { |
33 | StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); |
34 | const Expr *tempExpr = ME->getSubExpr()->IgnoreParens(); |
35 | ProgramStateRef state = Pred->getState(); |
36 | const LocationContext *LCtx = Pred->getLocationContext(); |
37 | |
38 | state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME); |
39 | Bldr.generateNode(ME, Pred, state); |
40 | } |
41 | |
42 | // FIXME: This is the sort of code that should eventually live in a Core |
43 | // checker rather than as a special case in ExprEngine. |
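// For example, both of the following end up here and are modeled as a plain
// load from the source object followed by a bind into the target region,
// instead of inlining the trivial callee (a sketch):
//
//   struct Trivial { int x; };
//   Trivial a, b;
//   Trivial c(a); // trivial copy constructor
//   b = a;        // trivial copy-assignment operator; yields an lvalue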
44 | void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred, |
45 | const CallEvent &Call) { |
46 | SVal ThisVal; |
47 | bool AlwaysReturnsLValue; |
48 | const CXXRecordDecl *ThisRD = nullptr; |
  if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
50 | assert(Ctor->getDecl()->isTrivial()); |
51 | assert(Ctor->getDecl()->isCopyOrMoveConstructor()); |
52 | ThisVal = Ctor->getCXXThisVal(); |
53 | ThisRD = Ctor->getDecl()->getParent(); |
54 | AlwaysReturnsLValue = false; |
55 | } else { |
56 | assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial()); |
57 | assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() == |
58 | OO_Equal); |
    ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
    ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
61 | AlwaysReturnsLValue = true; |
62 | } |
63 | |
64 | const LocationContext *LCtx = Pred->getLocationContext(); |
65 | const Expr *CallExpr = Call.getOriginExpr(); |
66 | |
67 | ExplodedNodeSet Dst; |
  Bldr.takeNodes(Pred);
69 | |
70 | assert(ThisRD); |
71 | if (!ThisRD->isEmpty()) { |
    // Load the source value only for non-empty classes. Otherwise we would
    // retrieve an UnknownVal and bind it, and RegionStore would then treat
    // the actual value in this region at this offset as unknown.
    SVal V = Call.getArgSVal(0);
77 | |
78 | // If the value being copied is not unknown, load from its location to get |
79 | // an aggregate rvalue. |
80 | if (std::optional<Loc> L = V.getAs<Loc>()) |
      V = Pred->getState()->getSVal(*L);
82 | else |
83 | assert(V.isUnknownOrUndef()); |
84 | evalBind(Dst, CallExpr, Pred, ThisVal, V, true); |
85 | } else { |
    Dst.Add(Pred);
87 | } |
88 | |
89 | PostStmt PS(CallExpr, LCtx); |
90 | for (ExplodedNode *N : Dst) { |
91 | ProgramStateRef State = N->getState(); |
92 | if (AlwaysReturnsLValue) |
93 | State = State->BindExpr(CallExpr, LCtx, ThisVal); |
94 | else |
95 | State = bindReturnValue(Call, LCtx, State); |
    Bldr.generateNode(PS, State, N);
97 | } |
98 | } |
99 | |
100 | SVal ExprEngine::makeElementRegion(ProgramStateRef State, SVal LValue, |
101 | QualType &Ty, bool &IsArray, unsigned Idx) { |
102 | SValBuilder &SVB = State->getStateManager().getSValBuilder(); |
103 | ASTContext &Ctx = SVB.getContext(); |
104 | |
  if (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
    while (AT) {
      Ty = AT->getElementType();
      AT = dyn_cast<ArrayType>(AT->getElementType());
    }
    LValue = State->getLValue(Ty, SVB.makeArrayIndex(Idx), LValue);
111 | IsArray = true; |
112 | } |
113 | |
114 | return LValue; |
115 | } |
116 | |
// When the prvalue is returned from the function (the kind is one of
// SimpleReturnedValueKind, CXX17ElidedCopyReturnedValueKind), its
// materialization happens in the context of the caller.
// We pass BldrCtx explicitly, as currBldrCtx always refers to the callee's
// context.
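// For example (sketch):
//   struct S { S(); };
//   S make() { return S(); }
//   const S &r = make(); // the returned prvalue is materialized in the
//                        // caller's stack frame, not in make()'s frame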
121 | SVal ExprEngine::computeObjectUnderConstruction( |
122 | const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx, |
123 | const LocationContext *LCtx, const ConstructionContext *CC, |
124 | EvalCallOptions &CallOpts, unsigned Idx) { |
125 | |
126 | SValBuilder &SVB = getSValBuilder(); |
127 | MemRegionManager &MRMgr = SVB.getRegionManager(); |
128 | ASTContext &ACtx = SVB.getContext(); |
129 | |
130 | // Compute the target region by exploring the construction context. |
131 | if (CC) { |
132 | switch (CC->getKind()) { |
133 | case ConstructionContext::CXX17ElidedCopyVariableKind: |
134 | case ConstructionContext::SimpleVariableKind: { |
      const auto *DSCC = cast<VariableConstructionContext>(CC);
      const auto *DS = DSCC->getDeclStmt();
      const auto *Var = cast<VarDecl>(DS->getSingleDecl());
      QualType Ty = Var->getType();
      return makeElementRegion(State, State->getLValue(Var, LCtx), Ty,
                               CallOpts.IsArrayCtorOrDtor, Idx);
141 | } |
142 | case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind: |
143 | case ConstructionContext::SimpleConstructorInitializerKind: { |
      const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
      const auto *Init = ICC->getCXXCtorInitializer();
      const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
      Loc ThisPtr = SVB.getCXXThis(CurCtor, LCtx->getStackFrame());
      SVal ThisVal = State->getSVal(ThisPtr);
149 | if (Init->isBaseInitializer()) { |
        const auto *ThisReg = cast<SubRegion>(ThisVal.getAsRegion());
        const CXXRecordDecl *BaseClass =
            Init->getBaseClass()->getAsCXXRecordDecl();
        const auto *BaseReg =
            MRMgr.getCXXBaseObjectRegion(BaseClass, ThisReg,
                                         Init->isBaseVirtual());
        return SVB.makeLoc(BaseReg);
157 | } |
158 | if (Init->isDelegatingInitializer()) |
159 | return ThisVal; |
160 | |
161 | const ValueDecl *Field; |
162 | SVal FieldVal; |
163 | if (Init->isIndirectMemberInitializer()) { |
164 | Field = Init->getIndirectMember(); |
        FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal);
166 | } else { |
167 | Field = Init->getMember(); |
        FieldVal = State->getLValue(Init->getMember(), ThisVal);
169 | } |
170 | |
171 | QualType Ty = Field->getType(); |
      return makeElementRegion(State, FieldVal, Ty, CallOpts.IsArrayCtorOrDtor,
                               Idx);
174 | } |
175 | case ConstructionContext::NewAllocatedObjectKind: { |
176 | if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { |
        const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC);
        const auto *NE = NECC->getCXXNewExpr();
        SVal V = *getObjectUnderConstruction(State, NE, LCtx);
        if (const SubRegion *MR =
                dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
182 | if (NE->isArray()) { |
183 | CallOpts.IsArrayCtorOrDtor = true; |
184 | |
185 | auto Ty = NE->getType()->getPointeeType(); |
186 | while (const auto *AT = getContext().getAsArrayType(Ty)) |
187 | Ty = AT->getElementType(); |
188 | |
            auto R = MRMgr.getElementRegion(Ty, svalBuilder.makeArrayIndex(Idx),
                                            MR, SVB.getContext());
191 | |
192 | return loc::MemRegionVal(R); |
193 | } |
194 | return V; |
195 | } |
196 | // TODO: Detect when the allocator returns a null pointer. |
197 | // Constructor shall not be called in this case. |
198 | } |
199 | break; |
200 | } |
201 | case ConstructionContext::SimpleReturnedValueKind: |
202 | case ConstructionContext::CXX17ElidedCopyReturnedValueKind: { |
203 | // The temporary is to be managed by the parent stack frame. |
204 | // So build it in the parent stack frame if we're not in the |
205 | // top frame of the analysis. |
206 | const StackFrameContext *SFC = LCtx->getStackFrame(); |
207 | if (const LocationContext *CallerLCtx = SFC->getParent()) { |
208 | auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()] |
209 | .getAs<CFGCXXRecordTypedCall>(); |
210 | if (!RTC) { |
211 | // We were unable to find the correct construction context for the |
212 | // call in the parent stack frame. This is equivalent to not being |
213 | // able to find construction context at all. |
214 | break; |
215 | } |
        if (isa<BlockInvocationContext>(CallerLCtx)) {
217 | // Unwrap block invocation contexts. They're mostly part of |
218 | // the current stack frame. |
219 | CallerLCtx = CallerLCtx->getParent(); |
220 | assert(!isa<BlockInvocationContext>(CallerLCtx)); |
221 | } |
222 | |
223 | NodeBuilderContext CallerBldrCtx(getCoreEngine(), |
224 | SFC->getCallSiteBlock(), CallerLCtx); |
        return computeObjectUnderConstruction(
            cast<Expr>(SFC->getCallSite()), State, &CallerBldrCtx, CallerLCtx,
            RTC->getConstructionContext(), CallOpts);
228 | } else { |
        // We are on the top frame of the analysis. We do not know where the
        // object is returned to. Conjure a symbolic region for the return
        // value.
231 | // TODO: We probably need a new MemRegion kind to represent the storage |
232 | // of that SymbolicRegion, so that we could produce a fancy symbol |
233 | // instead of an anonymous conjured symbol. |
234 | // TODO: Do we need to track the region to avoid having it dead |
235 | // too early? It does die too early, at least in C++17, but because |
236 | // putting anything into a SymbolicRegion causes an immediate escape, |
237 | // it doesn't cause any leak false positives. |
        const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
239 | // Make sure that this doesn't coincide with any other symbol |
240 | // conjured for the returned expression. |
241 | static const int TopLevelSymRegionTag = 0; |
        const Expr *RetE = RCC->getReturnStmt()->getRetValue();
        assert(RetE && "Void returns should not have a construction context");
        QualType ReturnTy = RetE->getType();
        QualType RegionTy = ACtx.getPointerType(ReturnTy);
        return SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC, RegionTy,
                                    currBldrCtx->blockCount());
248 | } |
249 | llvm_unreachable("Unhandled return value construction context!" ); |
250 | } |
251 | case ConstructionContext::ElidedTemporaryObjectKind: { |
252 | assert(AMgr.getAnalyzerOptions().ShouldElideConstructors); |
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
254 | |
255 | // Support pre-C++17 copy elision. We'll have the elidable copy |
256 | // constructor in the AST and in the CFG, but we'll skip it |
257 | // and construct directly into the final object. This call |
258 | // also sets the CallOpts flags for us. |
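      // A sketch of the pre-C++17 situation being modeled:
      //   struct S { S(); S(const S &); };
      //   S s = S(); // the elidable S(const S &) call is skipped and S()
      //              // constructs directly into 's'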
259 | // If the elided copy/move constructor is not supported, there's still |
260 | // benefit in trying to model the non-elided constructor. |
261 | // Stash our state before trying to elide, as it'll get overwritten. |
262 | ProgramStateRef PreElideState = State; |
263 | EvalCallOptions PreElideCallOpts = CallOpts; |
264 | |
265 | SVal V = computeObjectUnderConstruction( |
266 | TCC->getConstructorAfterElision(), State, BldrCtx, LCtx, |
267 | TCC->getConstructionContextAfterElision(), CallOpts); |
268 | |
269 | // FIXME: This definition of "copy elision has not failed" is unreliable. |
270 | // It doesn't indicate that the constructor will actually be inlined |
271 | // later; this is still up to evalCall() to decide. |
272 | if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) |
273 | return V; |
274 | |
275 | // Copy elision failed. Revert the changes and proceed as if we have |
276 | // a simple temporary. |
277 | CallOpts = PreElideCallOpts; |
278 | CallOpts.IsElidableCtorThatHasNotBeenElided = true; |
279 | [[fallthrough]]; |
280 | } |
281 | case ConstructionContext::SimpleTemporaryObjectKind: { |
      const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
283 | const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr(); |
284 | |
285 | CallOpts.IsTemporaryCtorOrDtor = true; |
286 | if (MTE) { |
287 | if (const ValueDecl *VD = MTE->getExtendingDecl()) { |
288 | StorageDuration SD = MTE->getStorageDuration(); |
289 | assert(SD != SD_FullExpression); |
290 | if (!VD->getType()->isReferenceType()) { |
291 | // We're lifetime-extended by a surrounding aggregate. |
292 | // Automatic destructors aren't quite working in this case |
293 | // on the CFG side. We should warn the caller about that. |
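          // A sketch of the situation in question:
          //   struct A { const S &ref; };
          //   A a{S()}; // the S() temporary is lifetime-extended through
          //             // 'a', whose type is not a reference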
294 | // FIXME: Is there a better way to retrieve this information from |
295 | // the MaterializeTemporaryExpr? |
296 | CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true; |
297 | } |
298 | |
          if (SD == SD_Static || SD == SD_Thread)
            return loc::MemRegionVal(
                MRMgr.getCXXStaticLifetimeExtendedObjectRegion(E, VD));

          return loc::MemRegionVal(
              MRMgr.getCXXLifetimeExtendedObjectRegion(E, VD, LCtx));
305 | } |
306 | assert(MTE->getStorageDuration() == SD_FullExpression); |
307 | } |
308 | |
      return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
310 | } |
311 | case ConstructionContext::LambdaCaptureKind: { |
312 | CallOpts.IsTemporaryCtorOrDtor = true; |
313 | |
      const auto *LCC = cast<LambdaCaptureConstructionContext>(CC);

      SVal Base = loc::MemRegionVal(
          MRMgr.getCXXTempObjectRegion(LCC->getInitializer(), LCtx));
318 | |
      const auto *CE = dyn_cast_or_null<CXXConstructExpr>(E);
      if (getIndexOfElementToConstruct(State, CE, LCtx)) {
        CallOpts.IsArrayCtorOrDtor = true;
        Base = State->getLValue(E->getType(), svalBuilder.makeArrayIndex(Idx),
                                Base);
324 | } |
325 | |
326 | return Base; |
327 | } |
328 | case ConstructionContext::ArgumentKind: { |
329 | // Arguments are technically temporaries. |
330 | CallOpts.IsTemporaryCtorOrDtor = true; |
331 | |
      const auto *ACC = cast<ArgumentConstructionContext>(CC);
333 | const Expr *E = ACC->getCallLikeExpr(); |
334 | unsigned Idx = ACC->getIndex(); |
335 | |
336 | CallEventManager &CEMgr = getStateManager().getCallEventManager(); |
337 | auto getArgLoc = [&](CallEventRef<> Caller) -> std::optional<SVal> { |
338 | const LocationContext *FutureSFC = |
            Caller->getCalleeStackFrame(BldrCtx->blockCount());
340 | // Return early if we are unable to reliably foresee |
341 | // the future stack frame. |
342 | if (!FutureSFC) |
343 | return std::nullopt; |
344 | |
345 | // This should be equivalent to Caller->getDecl() for now, but |
346 | // FutureSFC->getDecl() is likely to support better stuff (like |
347 | // virtual functions) earlier. |
348 | const Decl *CalleeD = FutureSFC->getDecl(); |
349 | |
350 | // FIXME: Support for variadic arguments is not implemented here yet. |
        if (CallEvent::isVariadic(CalleeD))
352 | return std::nullopt; |
353 | |
354 | // Operator arguments do not correspond to operator parameters |
355 | // because this-argument is implemented as a normal argument in |
356 | // operator call expressions but not in operator declarations. |
357 | const TypedValueRegion *TVR = Caller->getParameterLocation( |
            *Caller->getAdjustedParameterIndex(Idx), BldrCtx->blockCount());
359 | if (!TVR) |
360 | return std::nullopt; |
361 | |
362 | return loc::MemRegionVal(TVR); |
363 | }; |
364 | |
      if (const auto *CE = dyn_cast<CallExpr>(E)) {
        CallEventRef<> Caller =
            CEMgr.getSimpleCall(CE, State, LCtx, getCFGElementRef());
368 | if (std::optional<SVal> V = getArgLoc(Caller)) |
369 | return *V; |
370 | else |
371 | break; |
      } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) {
        // Don't bother figuring out the target region for the future
        // constructor because we won't need it.
        CallEventRef<> Caller = CEMgr.getCXXConstructorCall(
            CCE, /*Target=*/nullptr, State, LCtx, getCFGElementRef());
377 | if (std::optional<SVal> V = getArgLoc(Caller)) |
378 | return *V; |
379 | else |
380 | break; |
      } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) {
        CallEventRef<> Caller =
            CEMgr.getObjCMethodCall(ME, State, LCtx, getCFGElementRef());
384 | if (std::optional<SVal> V = getArgLoc(Caller)) |
385 | return *V; |
386 | else |
387 | break; |
388 | } |
389 | } |
390 | } // switch (CC->getKind()) |
391 | } |
392 | |
393 | // If we couldn't find an existing region to construct into, assume we're |
394 | // constructing a temporary. Notify the caller of our failure. |
395 | CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; |
  return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
397 | } |
398 | |
399 | ProgramStateRef ExprEngine::updateObjectsUnderConstruction( |
400 | SVal V, const Expr *E, ProgramStateRef State, const LocationContext *LCtx, |
401 | const ConstructionContext *CC, const EvalCallOptions &CallOpts) { |
402 | if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) { |
403 | // Sounds like we failed to find the target region and therefore |
404 | // copy elision failed. There's nothing we can do about it here. |
405 | return State; |
406 | } |
407 | |
408 | // See if we're constructing an existing region by looking at the |
409 | // current construction context. |
410 | assert(CC && "Computed target region without construction context?" ); |
411 | switch (CC->getKind()) { |
412 | case ConstructionContext::CXX17ElidedCopyVariableKind: |
413 | case ConstructionContext::SimpleVariableKind: { |
    const auto *DSCC = cast<VariableConstructionContext>(CC);
    return addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, V);
416 | } |
417 | case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind: |
418 | case ConstructionContext::SimpleConstructorInitializerKind: { |
    const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
    const auto *Init = ICC->getCXXCtorInitializer();
    // Base and delegating initializers handled above
    assert(Init->isAnyMemberInitializer() &&
           "Base and delegating initializers should have been handled by "
           "computeObjectUnderConstruction()");
    return addObjectUnderConstruction(State, Init, LCtx, V);
426 | } |
427 | case ConstructionContext::NewAllocatedObjectKind: { |
428 | return State; |
429 | } |
430 | case ConstructionContext::SimpleReturnedValueKind: |
431 | case ConstructionContext::CXX17ElidedCopyReturnedValueKind: { |
432 | const StackFrameContext *SFC = LCtx->getStackFrame(); |
433 | const LocationContext *CallerLCtx = SFC->getParent(); |
434 | if (!CallerLCtx) { |
435 | // No extra work is necessary in top frame. |
436 | return State; |
437 | } |
438 | |
439 | auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()] |
440 | .getAs<CFGCXXRecordTypedCall>(); |
441 | assert(RTC && "Could not have had a target region without it" ); |
442 | if (isa<BlockInvocationContext>(Val: CallerLCtx)) { |
443 | // Unwrap block invocation contexts. They're mostly part of |
444 | // the current stack frame. |
445 | CallerLCtx = CallerLCtx->getParent(); |
446 | assert(!isa<BlockInvocationContext>(CallerLCtx)); |
447 | } |
448 | |
449 | return updateObjectsUnderConstruction(V, |
        cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
        RTC->getConstructionContext(), CallOpts);
452 | } |
453 | case ConstructionContext::ElidedTemporaryObjectKind: { |
454 | assert(AMgr.getAnalyzerOptions().ShouldElideConstructors); |
455 | if (!CallOpts.IsElidableCtorThatHasNotBeenElided) { |
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
457 | State = updateObjectsUnderConstruction( |
458 | V, TCC->getConstructorAfterElision(), State, LCtx, |
459 | TCC->getConstructionContextAfterElision(), CallOpts); |
460 | |
461 | // Remember that we've elided the constructor. |
462 | State = addObjectUnderConstruction( |
          State, TCC->getConstructorAfterElision(), LCtx, V);
464 | |
465 | // Remember that we've elided the destructor. |
466 | if (const auto *BTE = TCC->getCXXBindTemporaryExpr()) |
        State = elideDestructor(State, BTE, LCtx);
468 | |
469 | // Instead of materialization, shamelessly return |
470 | // the final object destination. |
471 | if (const auto *MTE = TCC->getMaterializedTemporaryExpr()) |
        State = addObjectUnderConstruction(State, MTE, LCtx, V);
473 | |
474 | return State; |
475 | } |
476 | // If we decided not to elide the constructor, proceed as if |
477 | // it's a simple temporary. |
478 | [[fallthrough]]; |
479 | } |
480 | case ConstructionContext::SimpleTemporaryObjectKind: { |
    const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
    if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
      State = addObjectUnderConstruction(State, MTE, LCtx, V);
487 | |
488 | return State; |
489 | } |
490 | case ConstructionContext::LambdaCaptureKind: { |
    const auto *LCC = cast<LambdaCaptureConstructionContext>(CC);

    // If we capture an array, we want to store the super region, not a
    // sub-region.
    if (const auto *EL = dyn_cast_or_null<ElementRegion>(V.getAsRegion()))
      V = loc::MemRegionVal(EL->getSuperRegion());

    return addObjectUnderConstruction(
        State, {LCC->getLambdaExpr(), LCC->getIndex()}, LCtx, V);
500 | } |
501 | case ConstructionContext::ArgumentKind: { |
    const auto *ACC = cast<ArgumentConstructionContext>(CC);
    if (const auto *BTE = ACC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    return addObjectUnderConstruction(
        State, {ACC->getCallLikeExpr(), ACC->getIndex()}, LCtx, V);
508 | } |
509 | } |
510 | llvm_unreachable("Unhandled construction context!" ); |
511 | } |
512 | |
513 | static ProgramStateRef |
514 | bindRequiredArrayElementToEnvironment(ProgramStateRef State, |
515 | const ArrayInitLoopExpr *AILE, |
516 | const LocationContext *LCtx, SVal Idx) { |
517 | // The ctor in this case is guaranteed to be a copy ctor, otherwise we hit a |
518 | // compile time error. |
519 | // |
520 | // -ArrayInitLoopExpr <-- we're here |
521 | // |-OpaqueValueExpr |
522 | // | `-DeclRefExpr <-- match this |
523 | // `-CXXConstructExpr |
524 | // `-ImplicitCastExpr |
525 | // `-ArraySubscriptExpr |
526 | // |-ImplicitCastExpr |
527 | // | `-OpaqueValueExpr |
528 | // | `-DeclRefExpr |
529 | // `-ArrayInitIndexExpr |
530 | // |
531 | // The resulting expression might look like the one below in an implicit |
532 | // copy/move ctor. |
533 | // |
534 | // ArrayInitLoopExpr <-- we're here |
535 | // |-OpaqueValueExpr |
536 | // | `-MemberExpr <-- match this |
537 | // | (`-CXXStaticCastExpr) <-- move ctor only |
538 | // | `-DeclRefExpr |
539 | // `-CXXConstructExpr |
540 | // `-ArraySubscriptExpr |
541 | // |-ImplicitCastExpr |
542 | // | `-OpaqueValueExpr |
543 | // | `-MemberExpr |
544 | // | `-DeclRefExpr |
545 | // `-ArrayInitIndexExpr |
546 | // |
547 | // The resulting expression for a multidimensional array. |
548 | // ArrayInitLoopExpr <-- we're here |
549 | // |-OpaqueValueExpr |
550 | // | `-DeclRefExpr <-- match this |
551 | // `-ArrayInitLoopExpr |
552 | // |-OpaqueValueExpr |
553 | // | `-ArraySubscriptExpr |
554 | // | |-ImplicitCastExpr |
555 | // | | `-OpaqueValueExpr |
556 | // | | `-DeclRefExpr |
557 | // | `-ArrayInitIndexExpr |
558 | // `-CXXConstructExpr <-- extract this |
559 | // ` ... |
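  // Source-level sketches that produce the trees above:
  //
  //   struct T { T(); T(const T &); };
  //   T ts[3];
  //   auto l = [ts] {};  // capturing an array by value: DeclRefExpr source
  //
  //   struct S { T arr[3]; };
  //   S a;
  //   S b = a;           // implicit copy ctor: MemberExpr source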
560 | |
561 | const auto *OVESrc = AILE->getCommonExpr()->getSourceExpr(); |
562 | |
563 | // HACK: There is no way we can put the index of the array element into the |
564 | // CFG unless we unroll the loop, so we manually select and bind the required |
565 | // parameter to the environment. |
566 | const auto *CE = |
      cast<CXXConstructExpr>(extractElementInitializerFromNestedAILE(AILE));
568 | |
569 | SVal Base = UnknownVal(); |
  if (const auto *ME = dyn_cast<MemberExpr>(OVESrc))
    Base = State->getSVal(ME, LCtx);
  else if (const auto *DRE = dyn_cast<DeclRefExpr>(OVESrc))
    Base = State->getLValue(cast<VarDecl>(DRE->getDecl()), LCtx);
  else
    llvm_unreachable("ArrayInitLoopExpr contains unexpected source expression");
576 | |
577 | SVal NthElem = State->getLValue(CE->getType(), Idx, Base); |
578 | |
  return State->BindExpr(CE->getArg(0), LCtx, NthElem);
580 | } |
581 | |
582 | void ExprEngine::handleConstructor(const Expr *E, |
583 | ExplodedNode *Pred, |
584 | ExplodedNodeSet &destNodes) { |
  const auto *CE = dyn_cast<CXXConstructExpr>(E);
  const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(E);
587 | assert(CE || CIE); |
588 | |
589 | const LocationContext *LCtx = Pred->getLocationContext(); |
590 | ProgramStateRef State = Pred->getState(); |
591 | |
592 | SVal Target = UnknownVal(); |
593 | |
594 | if (CE) { |
595 | if (std::optional<SVal> ElidedTarget = |
            getObjectUnderConstruction(State, CE, LCtx)) {
597 | // We've previously modeled an elidable constructor by pretending that |
598 | // it in fact constructs into the correct target. This constructor can |
599 | // therefore be skipped. |
600 | Target = *ElidedTarget; |
601 | StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx); |
      State = finishObjectConstruction(State, CE, LCtx);
      if (auto L = Target.getAs<Loc>())
        State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType()));
605 | Bldr.generateNode(CE, Pred, State); |
606 | return; |
607 | } |
608 | } |
609 | |
610 | EvalCallOptions CallOpts; |
611 | auto C = getCurrentCFGElement().getAs<CFGConstructor>(); |
612 | assert(C || getCurrentCFGElement().getAs<CFGStmt>()); |
613 | const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr; |
614 | |
615 | const CXXConstructionKind CK = |
616 | CE ? CE->getConstructionKind() : CIE->getConstructionKind(); |
617 | switch (CK) { |
618 | case CXXConstructionKind::Complete: { |
619 | // Inherited constructors are always base class constructors. |
    assert(CE && !CIE && "A complete constructor is inherited?!");
621 | |
622 | // If the ctor is part of an ArrayInitLoopExpr, we want to handle it |
623 | // differently. |
624 | auto *AILE = CC ? CC->getArrayInitLoop() : nullptr; |
625 | |
626 | unsigned Idx = 0; |
627 | if (CE->getType()->isArrayType() || AILE) { |
628 | |
629 | auto isZeroSizeArray = [&] { |
630 | uint64_t Size = 1; |
631 | |
632 | if (const auto *CAT = dyn_cast<ConstantArrayType>(CE->getType())) |
          Size = getContext().getConstantArrayElementCount(CAT);
634 | else if (AILE) |
635 | Size = getContext().getArrayInitLoopExprElementCount(AILE); |
636 | |
637 | return Size == 0; |
638 | }; |
639 | |
640 | // No element construction will happen in a 0 size array. |
641 | if (isZeroSizeArray()) { |
642 | StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx); |
643 | static SimpleProgramPointTag T{"ExprEngine" , |
644 | "Skipping 0 size array construction" }; |
645 | Bldr.generateNode(CE, Pred, State, &T); |
646 | return; |
647 | } |
648 | |
      Idx = getIndexOfElementToConstruct(State, CE, LCtx).value_or(0u);
      State = setIndexOfElementToConstruct(State, CE, LCtx, Idx + 1);
651 | } |
652 | |
653 | if (AILE) { |
654 | // Only set this once even though we loop through it multiple times. |
      if (!getPendingInitLoop(State, CE, LCtx))
        State = setPendingInitLoop(
            State, CE, LCtx,
            getContext().getArrayInitLoopExprElementCount(AILE));

      State = bindRequiredArrayElementToEnvironment(
          State, AILE, LCtx, svalBuilder.makeArrayIndex(Idx));
662 | } |
663 | |
664 | // The target region is found from construction context. |
    std::tie(State, Target) = handleConstructionContext(
        CE, State, currBldrCtx, LCtx, CC, CallOpts, Idx);
667 | break; |
668 | } |
669 | case CXXConstructionKind::VirtualBase: { |
670 | // Make sure we are not calling virtual base class initializers twice. |
671 | // Only the most-derived object should initialize virtual base classes. |
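    // For example (sketch):
    //   struct A {};
    //   struct B : virtual A {};
    //   struct C : B {};
    //   C c; // only C's constructor initializes the virtual base A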
672 | const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>( |
        LCtx->getStackFrame()->getCallSite());
674 | assert( |
675 | (!OuterCtor || |
676 | OuterCtor->getConstructionKind() == CXXConstructionKind::Complete || |
677 | OuterCtor->getConstructionKind() == CXXConstructionKind::Delegating) && |
678 | ("This virtual base should have already been initialized by " |
679 | "the most derived class!" )); |
680 | (void)OuterCtor; |
681 | [[fallthrough]]; |
682 | } |
683 | case CXXConstructionKind::NonVirtualBase: |
    // In C++17, classes with non-virtual bases may be aggregates and therefore
    // initialized without a constructor call, so we may encounter a base class
    // constructed directly into an initializer list without the derived-class
    // constructor call on the previous stack frame. Initializer lists may be
    // nested into more initializer lists that correspond to surrounding
    // aggregate initializations.
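    // A sketch of the C++17 scenario in question:
    //   struct B { B(); };
    //   struct D : B { int y; }; // still an aggregate in C++17
    //   D d{{}, 2}; // B's constructor runs without a D constructor frame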
690 | // FIXME: For now this code essentially bails out. We need to find the |
691 | // correct target region and set it. |
692 | // FIXME: Instead of relying on the ParentMap, we should have the |
693 | // trigger-statement (InitListExpr in this case) passed down from CFG or |
694 | // otherwise always available during construction. |
695 | if (isa_and_nonnull<InitListExpr>(LCtx->getParentMap().getParent(E))) { |
696 | MemRegionManager &MRMgr = getSValBuilder().getRegionManager(); |
      Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
698 | CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; |
699 | break; |
700 | } |
701 | [[fallthrough]]; |
702 | case CXXConstructionKind::Delegating: { |
    const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
    Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor,
                                              LCtx->getStackFrame());
    SVal ThisVal = State->getSVal(ThisPtr);
707 | |
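    // For example (sketch):
    //   struct S {
    //     S() : S(0) {} // delegating: constructs into the same 'this' object
    //     S(int);
    //   };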
708 | if (CK == CXXConstructionKind::Delegating) { |
709 | Target = ThisVal; |
710 | } else { |
711 | // Cast to the base type. |
712 | bool IsVirtual = (CK == CXXConstructionKind::VirtualBase); |
713 | SVal BaseVal = |
          getStoreManager().evalDerivedToBase(ThisVal, E->getType(), IsVirtual);
715 | Target = BaseVal; |
716 | } |
717 | break; |
718 | } |
719 | } |
720 | |
721 | if (State != Pred->getState()) { |
722 | static SimpleProgramPointTag T("ExprEngine" , |
723 | "Prepare for object construction" ); |
724 | ExplodedNodeSet DstPrepare; |
725 | StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx); |
726 | BldrPrepare.generateNode(E, Pred, State, &T, ProgramPoint::PreStmtKind); |
727 | assert(DstPrepare.size() <= 1); |
728 | if (DstPrepare.size() == 0) |
729 | return; |
730 | Pred = *BldrPrepare.begin(); |
731 | } |
732 | |
733 | const MemRegion *TargetRegion = Target.getAsRegion(); |
734 | CallEventManager &CEMgr = getStateManager().getCallEventManager(); |
735 | CallEventRef<> Call = |
      CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall(
                CIE, TargetRegion, State, LCtx, getCFGElementRef())
          : (CallEventRef<>)CEMgr.getCXXConstructorCall(
                CE, TargetRegion, State, LCtx, getCFGElementRef());
740 | |
741 | ExplodedNodeSet DstPreVisit; |
742 | getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, E, *this); |
743 | |
744 | ExplodedNodeSet PreInitialized; |
745 | if (CE) { |
746 | // FIXME: Is it possible and/or useful to do this before PreStmt? |
747 | StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx); |
748 | for (ExplodedNode *N : DstPreVisit) { |
749 | ProgramStateRef State = N->getState(); |
750 | if (CE->requiresZeroInitialization()) { |
751 | // FIXME: Once we properly handle constructors in new-expressions, we'll |
752 | // need to invalidate the region before setting a default value, to make |
753 | // sure there aren't any lingering bindings around. This probably needs |
754 | // to happen regardless of whether or not the object is zero-initialized |
755 | // to handle random fields of a placement-initialized object picking up |
756 | // old bindings. We might only want to do it when we need to, though. |
757 | // FIXME: This isn't actually correct for arrays -- we need to zero- |
758 | // initialize the entire array, not just the first element -- but our |
759 | // handling of arrays everywhere else is weak as well, so this shouldn't |
760 | // actually make things worse. Placement new makes this tricky as well, |
761 | // since it's then possible to be initializing one part of a multi- |
762 | // dimensional array. |
        State = State->bindDefaultZero(Target, LCtx);
764 | } |
765 | |
766 | Bldr.generateNode(CE, N, State, /*tag=*/nullptr, |
767 | ProgramPoint::PreStmtKind); |
768 | } |
769 | } else { |
770 | PreInitialized = DstPreVisit; |
771 | } |
772 | |
773 | ExplodedNodeSet DstPreCall; |
  getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
                                            *Call, *this);
776 | |
777 | ExplodedNodeSet DstEvaluated; |
778 | |
779 | if (CE && CE->getConstructor()->isTrivial() && |
780 | CE->getConstructor()->isCopyOrMoveConstructor() && |
781 | !CallOpts.IsArrayCtorOrDtor) { |
782 | StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx); |
783 | // FIXME: Handle other kinds of trivial constructors as well. |
784 | for (ExplodedNode *N : DstPreCall) |
      performTrivialCopy(Bldr, N, *Call);
786 | |
787 | } else { |
788 | for (ExplodedNode *N : DstPreCall) |
      getCheckerManager().runCheckersForEvalCall(DstEvaluated, N, *Call, *this,
                                                 CallOpts);
791 | } |
792 | |
793 | // If the CFG was constructed without elements for temporary destructors |
794 | // and the just-called constructor created a temporary object then |
795 | // stop exploration if the temporary object has a noreturn constructor. |
796 | // This can lose coverage because the destructor, if it were present |
797 | // in the CFG, would be called at the end of the full expression or |
798 | // later (for life-time extended temporaries) -- but avoids infeasible |
799 | // paths when no-return temporary destructors are used for assertions. |
800 | ExplodedNodeSet DstEvaluatedPostProcessed; |
801 | StmtNodeBuilder Bldr(DstEvaluated, DstEvaluatedPostProcessed, *currBldrCtx); |
802 | const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext(); |
803 | if (!ADC->getCFGBuildOptions().AddTemporaryDtors) { |
804 | if (llvm::isa_and_nonnull<CXXTempObjectRegion, |
                              CXXLifetimeExtendedObjectRegion>(TargetRegion) &&
        cast<CXXConstructorDecl>(Call->getDecl())
807 | ->getParent() |
808 | ->isAnyDestructorNoReturn()) { |
809 | |
810 | // If we've inlined the constructor, then DstEvaluated would be empty. |
811 | // In this case we still want a sink, which could be implemented |
812 | // in processCallExit. But we don't have that implemented at the moment, |
813 | // so if you hit this assertion, see if you can avoid inlining |
814 | // the respective constructor when analyzer-config cfg-temporary-dtors |
815 | // is set to false. |
816 | // Otherwise there's nothing wrong with inlining such constructor. |
817 | assert(!DstEvaluated.empty() && |
818 | "We should not have inlined this constructor!" ); |
819 | |
820 | for (ExplodedNode *N : DstEvaluated) { |
821 | Bldr.generateSink(E, N, N->getState()); |
822 | } |
823 | |
824 | // There is no need to run the PostCall and PostStmt checker |
      // callbacks because we just generated sinks on all nodes in the
826 | // frontier. |
827 | return; |
828 | } |
829 | } |
830 | |
831 | ExplodedNodeSet DstPostArgumentCleanup; |
832 | for (ExplodedNode *I : DstEvaluatedPostProcessed) |
    finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);
834 | |
835 | // If there were other constructors called for object-type arguments |
836 | // of this constructor, clean them up. |
837 | ExplodedNodeSet DstPostCall; |
  getCheckerManager().runCheckersForPostCall(DstPostCall,
                                             DstPostArgumentCleanup,
                                             *Call, *this);
841 | getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, E, *this); |
842 | } |
843 | |
844 | void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE, |
845 | ExplodedNode *Pred, |
846 | ExplodedNodeSet &Dst) { |
847 | handleConstructor(CE, Pred, Dst); |
848 | } |
849 | |
850 | void ExprEngine::VisitCXXInheritedCtorInitExpr( |
851 | const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred, |
852 | ExplodedNodeSet &Dst) { |
853 | handleConstructor(CE, Pred, Dst); |
854 | } |
855 | |
856 | void ExprEngine::VisitCXXDestructor(QualType ObjectType, |
857 | const MemRegion *Dest, |
858 | const Stmt *S, |
859 | bool IsBaseDtor, |
860 | ExplodedNode *Pred, |
861 | ExplodedNodeSet &Dst, |
862 | EvalCallOptions &CallOpts) { |
863 | assert(S && "A destructor without a trigger!" ); |
864 | const LocationContext *LCtx = Pred->getLocationContext(); |
865 | ProgramStateRef State = Pred->getState(); |
866 | |
867 | const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl(); |
  assert(RecordDecl && "Only CXXRecordDecls should have destructors");
869 | const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor(); |
870 | // FIXME: There should always be a Decl, otherwise the destructor call |
871 | // shouldn't have been added to the CFG in the first place. |
872 | if (!DtorDecl) { |
873 | // Skip the invalid destructor. We cannot simply return because |
874 | // it would interrupt the analysis instead. |
875 | static SimpleProgramPointTag T("ExprEngine" , "SkipInvalidDestructor" ); |
876 | // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway. |
877 | PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx, |
878 | getCFGElementRef(), &T); |
879 | NodeBuilder Bldr(Pred, Dst, *currBldrCtx); |
    Bldr.generateNode(PP, Pred->getState(), Pred);
881 | return; |
882 | } |
883 | |
884 | if (!Dest) { |
885 | // We're trying to destroy something that is not a region. This may happen |
886 | // for a variety of reasons (unknown target region, concrete integer instead |
887 | // of target region, etc.). The current code makes an attempt to recover. |
888 | // FIXME: We probably don't really need to recover when we're dealing |
889 | // with concrete integers specifically. |
890 | CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; |
    if (const Expr *E = dyn_cast_or_null<Expr>(S)) {
      Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext());
893 | } else { |
894 | static SimpleProgramPointTag T("ExprEngine" , "SkipInvalidDestructor" ); |
895 | NodeBuilder Bldr(Pred, Dst, *currBldrCtx); |
896 | Bldr.generateSink(PP: Pred->getLocation().withTag(tag: &T), |
897 | State: Pred->getState(), Pred); |
898 | return; |
899 | } |
900 | } |
901 | |
902 | CallEventManager &CEMgr = getStateManager().getCallEventManager(); |
903 | CallEventRef<CXXDestructorCall> Call = CEMgr.getCXXDestructorCall( |
      DtorDecl, S, Dest, IsBaseDtor, State, LCtx, getCFGElementRef());
905 | |
906 | PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), |
907 | Call->getSourceRange().getBegin(), |
908 | "Error evaluating destructor" ); |
909 | |
910 | ExplodedNodeSet DstPreCall; |
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);
913 | |
914 | ExplodedNodeSet DstInvalidated; |
915 | StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx); |
916 | for (ExplodedNode *N : DstPreCall) |
    defaultEvalCall(Bldr, N, *Call, CallOpts);
918 | |
  getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated,
                                             *Call, *this);
921 | } |
922 | |
923 | void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE, |
924 | ExplodedNode *Pred, |
925 | ExplodedNodeSet &Dst) { |
926 | ProgramStateRef State = Pred->getState(); |
927 | const LocationContext *LCtx = Pred->getLocationContext(); |
928 | PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), |
929 | CNE->getBeginLoc(), |
930 | "Error evaluating New Allocator Call" ); |
931 | CallEventManager &CEMgr = getStateManager().getCallEventManager(); |
932 | CallEventRef<CXXAllocatorCall> Call = |
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx, getCFGElementRef());
934 | |
935 | ExplodedNodeSet DstPreCall; |
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);
938 | |
939 | ExplodedNodeSet DstPostCall; |
940 | StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx); |
941 | for (ExplodedNode *I : DstPreCall) { |
942 | // FIXME: Provide evalCall for checkers? |
    defaultEvalCall(CallBldr, I, *Call);
944 | } |
945 | // If the call is inlined, DstPostCall will be empty and we bail out now. |
946 | |
947 | // Store return value of operator new() for future use, until the actual |
948 | // CXXNewExpr gets processed. |
949 | ExplodedNodeSet DstPostValue; |
950 | StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx); |
951 | for (ExplodedNode *I : DstPostCall) { |
952 | // FIXME: Because CNE serves as the "call site" for the allocator (due to |
953 | // lack of a better expression in the AST), the conjured return value symbol |
954 | // is going to be of the same type (C++ object pointer type). Technically |
955 | // this is not correct because the operator new's prototype always says that |
956 | // it returns a 'void *'. So we should change the type of the symbol, |
957 | // and then evaluate the cast over the symbolic pointer from 'void *' to |
958 | // the object pointer type. But without changing the symbol's type it |
959 | // is breaking too much to evaluate the no-op symbolic cast over it, so we |
960 | // skip it for now. |
961 | ProgramStateRef State = I->getState(); |
962 | SVal RetVal = State->getSVal(CNE, LCtx); |
963 | // [basic.stc.dynamic.allocation] (on the return value of an allocation |
964 | // function): |
965 | // "The order, contiguity, and initial value of storage allocated by |
966 | // successive calls to an allocation function are unspecified." |
    State = State->bindDefaultInitial(RetVal, UndefinedVal{}, LCtx);
968 | |
969 | // If this allocation function is not declared as non-throwing, failures |
970 | // /must/ be signalled by exceptions, and thus the return value will never |
971 | // be NULL. -fno-exceptions does not influence this semantics. |
972 | // FIXME: GCC has a -fcheck-new option, which forces it to consider the case |
973 | // where new can return NULL. If we end up supporting that option, we can |
974 | // consider adding a check for it here. |
975 | // C++11 [basic.stc.dynamic.allocation]p3. |
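    // For example (sketch, given some class S):
    //   S *p = new S;                // assumed non-null here
    //   S *q = new (std::nothrow) S; // may legitimately be null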
976 | if (const FunctionDecl *FD = CNE->getOperatorNew()) { |
977 | QualType Ty = FD->getType(); |
978 | if (const auto *ProtoType = Ty->getAs<FunctionProtoType>()) |
979 | if (!ProtoType->isNothrow()) |
          State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true);
981 | } |
982 | |
983 | ValueBldr.generateNode( |
        CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal));
985 | } |
986 | |
987 | ExplodedNodeSet DstPostPostCallCallback; |
  getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                             DstPostValue, *Call, *this);
990 | for (ExplodedNode *I : DstPostPostCallCallback) { |
    getCheckerManager().runCheckersForNewAllocator(*Call, Dst, I, *this);
992 | } |
993 | } |
994 | |
995 | void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred, |
996 | ExplodedNodeSet &Dst) { |
997 | // FIXME: Much of this should eventually migrate to CXXAllocatorCall. |
998 | // Also, we need to decide how allocators actually work -- they're not |
999 | // really part of the CXXNewExpr because they happen BEFORE the |
1000 | // CXXConstructExpr subexpression. See PR12014 for some discussion. |
1001 | |
1002 | unsigned blockCount = currBldrCtx->blockCount(); |
1003 | const LocationContext *LCtx = Pred->getLocationContext(); |
1004 | SVal symVal = UnknownVal(); |
1005 | FunctionDecl *FD = CNE->getOperatorNew(); |
1006 | |
1007 | bool IsStandardGlobalOpNewFunction = |
1008 | FD->isReplaceableGlobalAllocationFunction(); |
1009 | |
1010 | ProgramStateRef State = Pred->getState(); |
1011 | |
1012 | // Retrieve the stored operator new() return value. |
1013 | if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { |
    symVal = *getObjectUnderConstruction(State, CNE, LCtx);
    State = finishObjectConstruction(State, CNE, LCtx);
1016 | } |
1017 | |
  // We assume all standard global 'operator new' functions allocate memory on
  // the heap. We realize this is an approximation that might not correctly
  // model a custom global allocator.
1021 | if (symVal.isUnknown()) { |
1022 | if (IsStandardGlobalOpNewFunction) |
1023 | symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount); |
1024 | else |
1025 | symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(), |
1026 | blockCount); |
1027 | } |
1028 | |
1029 | CallEventManager &CEMgr = getStateManager().getCallEventManager(); |
1030 | CallEventRef<CXXAllocatorCall> Call = |
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx, getCFGElementRef());
1032 | |
1033 | if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { |
1034 | // Invalidate placement args. |
1035 | // FIXME: Once we figure out how we want allocators to work, |
1036 | // we should be using the usual pre-/(default-)eval-/post-call checkers |
1037 | // here. |
    State = Call->invalidateRegions(blockCount);
1039 | if (!State) |
1040 | return; |
1041 | |
1042 | // If this allocation function is not declared as non-throwing, failures |
1043 | // /must/ be signalled by exceptions, and thus the return value will never |
1044 | // be NULL. -fno-exceptions does not influence this semantics. |
1045 | // FIXME: GCC has a -fcheck-new option, which forces it to consider the case |
1046 | // where new can return NULL. If we end up supporting that option, we can |
1047 | // consider adding a check for it here. |
1048 | // C++11 [basic.stc.dynamic.allocation]p3. |
1049 | if (const auto *ProtoType = FD->getType()->getAs<FunctionProtoType>()) |
1050 | if (!ProtoType->isNothrow()) |
1051 | if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>()) |
          State = State->assume(*dSymVal, true);
1053 | } |
1054 | |
1055 | StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); |
1056 | |
1057 | SVal Result = symVal; |
1058 | |
1059 | if (CNE->isArray()) { |
1060 | |
    if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) {
      // If each element is initialized by its default constructor, the field
      // values are properly placed inside the required region, however if an
      // initializer list is used, this doesn't happen automatically.
      auto *Init = CNE->getInitializer();
      bool isInitList = isa_and_nonnull<InitListExpr>(Init);
1067 | |
1068 | QualType ObjTy = |
1069 | isInitList ? Init->getType() : CNE->getType()->getPointeeType(); |
1070 | const ElementRegion *EleReg = |
          MRMgr.getElementRegion(ObjTy, svalBuilder.makeArrayIndex(0), NewReg,
                                 svalBuilder.getContext());
1073 | Result = loc::MemRegionVal(EleReg); |
1074 | |
1075 | // If the array is list initialized, we bind the initializer list to the |
1076 | // memory region here, otherwise we would lose it. |
1077 | if (isInitList) { |
        Bldr.takeNodes(Pred);
1079 | Pred = Bldr.generateNode(CNE, Pred, State); |
1080 | |
1081 | SVal V = State->getSVal(Init, LCtx); |
1082 | ExplodedNodeSet evaluated; |
1083 | evalBind(evaluated, CNE, Pred, Result, V, true); |
1084 | |
        Bldr.takeNodes(Pred);
        Bldr.addNodes(evaluated);
1087 | |
1088 | Pred = *evaluated.begin(); |
1089 | State = Pred->getState(); |
1090 | } |
1091 | } |
1092 | |
1093 | State = State->BindExpr(CNE, Pred->getLocationContext(), Result); |
1094 | Bldr.generateNode(CNE, Pred, State); |
1095 | return; |
1096 | } |
1097 | |
1098 | // FIXME: Once we have proper support for CXXConstructExprs inside |
1099 | // CXXNewExpr, we need to make sure that the constructed object is not |
1100 | // immediately invalidated here. (The placement call should happen before |
1101 | // the constructor call anyway.) |
1102 | if (FD->isReservedGlobalPlacementOperator()) { |
1103 | // Non-array placement new should always return the placement location. |
    SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx);
    Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(),
                                  CNE->getPlacementArg(0)->getType());
1107 | } |
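  // For example (sketch, given some class S):
  //   alignas(S) char buf[sizeof(S)];
  //   S *p = new (buf) S; // the result is 'buf' converted to 'S *'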
1108 | |
1109 | // Bind the address of the object, then check to see if we cached out. |
1110 | State = State->BindExpr(CNE, LCtx, Result); |
1111 | ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State); |
1112 | if (!NewN) |
1113 | return; |
1114 | |
1115 | // If the type is not a record, we won't have a CXXConstructExpr as an |
1116 | // initializer. Copy the value over. |
1117 | if (const Expr *Init = CNE->getInitializer()) { |
    if (!isa<CXXConstructExpr>(Init)) {
1119 | assert(Bldr.getResults().size() == 1); |
      Bldr.takeNodes(NewN);
1121 | evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx), |
1122 | /*FirstInit=*/IsStandardGlobalOpNewFunction); |
1123 | } |
1124 | } |
1125 | } |
1126 | |
1127 | void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE, |
1128 | ExplodedNode *Pred, ExplodedNodeSet &Dst) { |
1129 | |
1130 | CallEventManager &CEMgr = getStateManager().getCallEventManager(); |
1131 | CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall( |
      CDE, Pred->getState(), Pred->getLocationContext(), getCFGElementRef());
1133 | |
1134 | ExplodedNodeSet DstPreCall; |
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, *Call, *this);
1136 | ExplodedNodeSet DstPostCall; |
1137 | |
1138 | if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { |
1139 | StmtNodeBuilder Bldr(DstPreCall, DstPostCall, *currBldrCtx); |
1140 | for (ExplodedNode *I : DstPreCall) { |
      defaultEvalCall(Bldr, I, *Call);
1142 | } |
1143 | } else { |
1144 | DstPostCall = DstPreCall; |
1145 | } |
  getCheckerManager().runCheckersForPostCall(Dst, DstPostCall, *Call, *this);
1147 | } |
1148 | |
1149 | void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred, |
1150 | ExplodedNodeSet &Dst) { |
1151 | const VarDecl *VD = CS->getExceptionDecl(); |
1152 | if (!VD) { |
    Dst.Add(Pred);
1154 | return; |
1155 | } |
1156 | |
1157 | const LocationContext *LCtx = Pred->getLocationContext(); |
1158 | SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(), |
1159 | currBldrCtx->blockCount()); |
1160 | ProgramStateRef state = Pred->getState(); |
  state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx);
1162 | |
1163 | StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); |
  Bldr.generateNode(CS, Pred, state);
1165 | } |
1166 | |
1167 | void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred, |
1168 | ExplodedNodeSet &Dst) { |
1169 | StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); |
1170 | |
1171 | // Get the this object region from StoreManager. |
1172 | const LocationContext *LCtx = Pred->getLocationContext(); |
1173 | const MemRegion *R = |
1174 | svalBuilder.getRegionManager().getCXXThisRegion( |
          getContext().getCanonicalType(TE->getType()),
          LCtx);
1177 | |
1178 | ProgramStateRef state = Pred->getState(); |
  SVal V = state->getSVal(loc::MemRegionVal(R));
1180 | Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V)); |
1181 | } |
1182 | |
1183 | void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred, |
1184 | ExplodedNodeSet &Dst) { |
1185 | const LocationContext *LocCtxt = Pred->getLocationContext(); |
1186 | |
1187 | // Get the region of the lambda itself. |
1188 | const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion( |
1189 | LE, LocCtxt); |
1190 | SVal V = loc::MemRegionVal(R); |
1191 | |
1192 | ProgramStateRef State = Pred->getState(); |
1193 | |
1194 | // If we created a new MemRegion for the lambda, we should explicitly bind |
1195 | // the captures. |
1196 | for (auto const [Idx, FieldForCapture, InitExpr] : |
1197 | llvm::zip(llvm::seq<unsigned>(0, -1), LE->getLambdaClass()->fields(), |
1198 | LE->capture_inits())) { |
1199 | SVal FieldLoc = State->getLValue(FieldForCapture, V); |
1200 | |
1201 | SVal InitVal; |
1202 | if (!FieldForCapture->hasCapturedVLAType()) { |
      assert(InitExpr && "Capture missing initialization expression");
1204 | |
1205 | // Capturing a 0 length array is a no-op, so we ignore it to get a more |
1206 | // accurate analysis. If it's not ignored, it would set the default |
1207 | // binding of the lambda to 'Unknown', which can lead to falsely detecting |
1208 | // 'Uninitialized' values as 'Unknown' and not reporting a warning. |
1209 | const auto FTy = FieldForCapture->getType(); |
1210 | if (FTy->isConstantArrayType() && |
1211 | getContext().getConstantArrayElementCount( |
1212 | getContext().getAsConstantArrayType(FTy)) == 0) |
1213 | continue; |
1214 | |
      // With C++17 copy elision the InitExpr can be anything, so instead of
      // pattern matching all cases, we simply check whether the current field
      // is under construction or not, regardless of what its InitExpr is.
1218 | if (const auto OUC = |
1219 | getObjectUnderConstruction(State, {LE, Idx}, LocCtxt)) { |
1220 | InitVal = State->getSVal(OUC->getAsRegion()); |
1221 | |
1222 | State = finishObjectConstruction(State, {LE, Idx}, LocCtxt); |
1223 | } else |
1224 | InitVal = State->getSVal(InitExpr, LocCtxt); |
1225 | |
1226 | } else { |
1227 | |
1228 | assert(!getObjectUnderConstruction(State, {LE, Idx}, LocCtxt) && |
1229 | "VLA capture by value is a compile time error!" ); |
1230 | |
1231 | // The field stores the length of a captured variable-length array. |
1232 | // These captures don't have initialization expressions; instead we |
1233 | // get the length from the VLAType size expression. |
1234 | Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr(); |
1235 | InitVal = State->getSVal(SizeExpr, LocCtxt); |
1236 | } |
1237 | |
1238 | State = State->bindLoc(FieldLoc, InitVal, LocCtxt); |
1239 | } |
1240 | |
1241 | // Decay the Loc into an RValue, because there might be a |
1242 | // MaterializeTemporaryExpr node above this one which expects the bound value |
1243 | // to be an RValue. |
1244 | SVal LambdaRVal = State->getSVal(R); |
1245 | |
1246 | ExplodedNodeSet Tmp; |
1247 | StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx); |
1248 | // FIXME: is this the right program point kind? |
1249 | Bldr.generateNode(LE, Pred, |
1250 | State->BindExpr(LE, LocCtxt, LambdaRVal), |
1251 | nullptr, ProgramPoint::PostLValueKind); |
1252 | |
1253 | // FIXME: Move all post/pre visits to ::Visit(). |
1254 | getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this); |
1255 | } |
1256 | |