//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine --------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines a generic engine for intraprocedural, path-sensitive,
// dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <optional>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
            "The # of steps executed.");
STATISTIC(NumSTUSteps, "The # of STU steps executed.");
STATISTIC(NumCTUSteps, "The # of CTU steps executed.");
STATISTIC(NumReachedMaxSteps,
            "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
            "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

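/// Create the worklist implementation that matches the exploration strategy
/// requested in the analyzer options (DFS, BFS, or one of the hybrid
/// unexplored-first strategies).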
static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
  switch (Opts.getExplorationStrategy()) {
  case ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  case ExplorationStrategyKind::UnexploredFirstQueue:
    return WorkList::makeUnexploredFirstPriorityQueue();
  case ExplorationStrategyKind::UnexploredFirstLocationQueue:
    return WorkList::makeUnexploredFirstPriorityLocationQueue();
  }
  llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
}

CoreEngine::CoreEngine(ExprEngine &exprengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : ExprEng(exprengine), WList(generateWorkList(Opts)),
      CTUWList(Opts.IsNaiveCTUEnabled ? generateWorkList(Opts) : nullptr),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

void CoreEngine::setBlockCounter(BlockCounter C) {
  WList->setBlockCounter(C);
  if (CTUWList)
    CTUWList->setBlockCounter(C);
}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned MaxSteps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
    // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to being empty.
    setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = ExprEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    ExprEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit.
  bool UnlimitedSteps = MaxSteps == 0;

  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(MaxSteps, PreReservationCap));

  auto ProcessWList = [this, UnlimitedSteps](unsigned MaxSteps) {
    unsigned Steps = MaxSteps;
    while (WList->hasWork()) {
      if (!UnlimitedSteps) {
        if (Steps == 0) {
          NumReachedMaxSteps++;
          break;
        }
        --Steps;
      }

      NumSteps++;

      const WorkListUnit &WU = WList->dequeue();

      // Set the current block counter.
      setBlockCounter(WU.getBlockCounter());

      // Retrieve the node.
      ExplodedNode *Node = WU.getNode();

      dispatchWorkItem(Node, Node->getLocation(), WU);
    }
    return MaxSteps - Steps;
  };
  const unsigned STUSteps = ProcessWList(MaxSteps);

  if (CTUWList) {
    NumSTUSteps += STUSteps;
    const unsigned MinCTUSteps =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesMin;
    const unsigned Pct =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesPercentage;
    unsigned MaxCTUSteps = std::max(STUSteps * Pct / 100, MinCTUSteps);

    WList = std::move(CTUWList);
    const unsigned CTUSteps = ProcessWList(MaxCTUSteps);
    NumCTUSteps += CTUSteps;
  }

  ExprEng.processEndWorklist();
  return WList->hasWork();
}

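/// Route a dequeued worklist unit to the handler that matches the kind of
/// program point it is paused at (block edge, block entrance, call enter,
/// call exit, or an intra-block statement).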
void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
                                  const WorkListUnit &WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
      break;

    case ProgramPoint::BlockExitKind:
      assert(false && "BlockExit locations never occur in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind:
      HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
      break;

    case ProgramPoint::CallExitBeginKind:
      ExprEng.processCallExit(Pred);
      break;

    case ProgramPoint::EpsilonKind: {
      assert(Pred->hasSinglePred() &&
             "Assume epsilon has exactly one predecessor by construction");
      ExplodedNode *PNode = Pred->getFirstPred();
      dispatchWorkItem(Pred, PNode->getLocation(), WU);
      break;
    }
    default:
      assert(Loc.getAs<PostStmt>() ||
             Loc.getAs<PostInitializer>() ||
             Loc.getAs<PostImplicitCall>() ||
             Loc.getAs<CallExitEnd>() ||
             Loc.getAs<LoopExit>() ||
             Loc.getAs<PostAllocatorCall>());
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
      break;
  }
}

bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

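/// Process a transition along a CFG edge: mark the destination block as
/// visited, handle entry into the EXIT block specially, and otherwise let the
/// ExprEngine decide whether the destination block should be entered.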
void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Display a prunable path note to the user if it's a virtual bases branch
  // and we're taking the path that skips virtual base constructors.
  if (L.getSrc()->getTerminator().isVirtualBaseBranch() &&
      L.getDst() == *L.getSrc()->succ_begin()) {
    ProgramPoint P = L.withTag(getDataTags().make<NoteTag>(
        [](BugReporterContext &, PathSensitiveBugReport &) -> std::string {
          // TODO: Just call out the name of the most derived class
          // when we know it.
          return "Virtual base initialization skipped because "
                 "it has already been handled by the most derived class";
        },
        /*IsPrunable=*/true));
    // Perform the transition.
    ExplodedNodeSet Dst;
    NodeBuilder Bldr(Pred, Dst, BuilderCtx);
    Pred = Bldr.generateNode(P, Pred->getState(), Pred);
    if (!Pred)
      return;
  }

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement, if any.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      CFGElement LastElement = L.getSrc()->back();
      if (std::optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      } else if (std::optional<CFGAutomaticObjDtor> AutoDtor =
                     LastElement.getAs<CFGAutomaticObjDtor>()) {
        RS = dyn_cast<ReturnStmt>(AutoDtor->getTriggerStmt());
      }
    }

    // Process the final state transition.
    ExprEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the ExprEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  ExprEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

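/// Process entry into a CFG block: bump the per-stack-frame visit counter for
/// the block, then either start processing its first CFG element or, if the
/// block is empty, fall through to its exit.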
void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
                                           BlockId);
  setBlockCounter(Counter);

  // Process the entrance of the block.
  if (std::optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    ExprEng.processCFGElement(*E, Pred, 0, &Ctx);
  } else
    HandleBlockExit(L.getBlock(), Pred);
}

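/// Process the end of a CFG block by dispatching on its terminator: branches,
/// switches, loops, gotos and the various C++-specific terminators each get
/// their own handler; blocks without a terminator simply follow their single
/// successor edge.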
void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminatorStmt()) {
    switch (Term->getStmtClass()) {
      default:
        llvm_unreachable("Analysis for this terminator not implemented.");

      case Stmt::CXXBindTemporaryExprClass:
        HandleCleanupTemporaryBranch(
            cast<CXXBindTemporaryExpr>(Term), B, Pred);
        return;

      // Model static initializers.
      case Stmt::DeclStmtClass:
        HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
        return;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

      // FIXME: Use constant-folding in CFG construction to simplify this
      // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXTryStmtClass:
        // Generate a node for each of the successors.
        // Our logic for EH analysis can certainly be improved.
        for (CFGBlock::const_succ_iterator it = B->succ_begin(),
             et = B->succ_end(); it != et; ++it) {
          if (const CFGBlock *succ = *it) {
            generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                         Pred->State, Pred);
          }
        }
        return;

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXForRangeStmtClass:
        HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::SEHLeaveStmtClass:
      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert(B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        ExprEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass:
        // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), ExprEngine will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements.  We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
        HandleBranch(Term, Term, B, Pred);
        return;

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);

        ExprEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::GCCAsmStmtClass:
        assert(cast<GCCAsmStmt>(Term)->isAsmGoto() &&
               "Encountered GCCAsmStmt without labels");
        // TODO: Handle jumping to labels
        return;
    }
  }

  if (B->getTerminator().isVirtualBaseBranch()) {
    HandleVirtualBaseBranch(B, Pred);
    return;
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  ExprEng.processCallEnter(BuilderCtx, CE, Pred);
}

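/// Let the ExprEngine evaluate a two-way branch condition and enqueue the
/// resulting nodes for the true and false successor blocks.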
void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processBranch(Cond, Ctx, Pred, Dst, *(B->succ_begin()),
                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                   *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

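/// Continue processing a block after a statement has been evaluated: either
/// move on to the CFG element at StmtIdx or, if the end of the block has been
/// reached, handle the block exit.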
void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    ExprEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

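/// Decide which successor of a virtual-base branch to follow: skip the
/// virtual base initializers when the current constructor runs as a base
/// subobject constructor (the most derived class has already handled them),
/// and run them otherwise.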
void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
                                         ExplodedNode *Pred) {
  const LocationContext *LCtx = Pred->getLocationContext();
  if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
          LCtx->getStackFrame()->getCallSite())) {
    switch (CallerCtor->getConstructionKind()) {
    case CXXConstructionKind::NonVirtualBase:
    case CXXConstructionKind::VirtualBase: {
      BlockEdge Loc(B, *B->succ_begin(), LCtx);
      HandleBlockEdge(Loc, Pred);
      return;
    }
    default:
      break;
    }
  }

  // We either don't see a parent stack frame because we're in the top frame,
  // or the parent stack frame doesn't initialize our virtual bases.
  BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
  HandleBlockEdge(Loc, Pred);
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

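/// Enqueue a node produced for a CFG element, choosing the worklist position
/// (same element or the next one) based on the kind of program point the node
/// stopped at, and creating an explicit PostStmt node when one is needed.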
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}

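/// Create a CallExitBegin node in the callee's stack frame for a function
/// that is about to return; returns null if an identical node already exists.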
ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node and enqueue it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

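/// Process nodes that reached the end of a function: for inlined calls,
/// generate and enqueue a CallExitBegin node; for the top-level frame, record
/// the node as an end-of-path and count the explored path.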
void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set, const ReturnStmt *RS) {
  for (auto *I : Set) {
    // If we are in an inlined call, generate CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should run remove dead bindings here.
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

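/// Shared node-construction helper for the node builders: create (or find) a
/// node for the given program point and state, link it to its predecessor,
/// and keep the builder's frontier up to date. Returns null if the node
/// already existed.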
ExplodedNode *NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

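/// Create a node for the default case of a switch, whose target is the last
/// successor of the switch block; returns null if the default block is
/// unreachable or the node already existed.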
ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Basic correctness check for default blocks that are unreachable and not
  // caught by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}