1//===- TypeErasedDataflowAnalysis.cpp -------------------------------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines type-erased base types and functions for building dataflow
10// analyses that run over Control-Flow Graphs (CFGs).
11//
12//===----------------------------------------------------------------------===//
13
#include <deque>
#include <optional>
#include <system_error>
#include <utility>
#include <vector>

#include "clang/AST/ASTDumper.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/OperationKinds.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Analysis/Analyses/PostOrderCFGView.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/FlowSensitive/DataflowEnvironment.h"
#include "clang/Analysis/FlowSensitive/DataflowLattice.h"
#include "clang/Analysis/FlowSensitive/DataflowWorklist.h"
#include "clang/Analysis/FlowSensitive/Transfer.h"
#include "clang/Analysis/FlowSensitive/TypeErasedDataflowAnalysis.h"
#include "clang/Analysis/FlowSensitive/Value.h"
#include "clang/Support/Compiler.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Error.h"
37
38#define DEBUG_TYPE "clang-dataflow"
39
40namespace clang {
41namespace dataflow {
42class NoopLattice;
43}
44} // namespace clang
45
46namespace llvm {
// This needs to be exported for ClangAnalysisFlowSensitiveTests so that, when
// building with CLANG_LINK_CLANG_DYLIB, any_cast uses the correct address of
// Any::TypeId from the clang shared library instead of creating a separate one
// in the test executable.
51template struct CLANG_EXPORT_TEMPLATE Any::TypeId<clang::dataflow::NoopLattice>;
52} // namespace llvm
53
54namespace clang {
55namespace dataflow {
56
57/// Returns the index of `Block` in the successors of `Pred`.
58static int blockIndexInPredecessor(const CFGBlock &Pred,
59 const CFGBlock &Block) {
60 auto BlockPos = llvm::find_if(
61 Range: Pred.succs(), P: [&Block](const CFGBlock::AdjacentBlock &Succ) {
62 return Succ && Succ->getBlockID() == Block.getBlockID();
63 });
64 return BlockPos - Pred.succ_begin();
65}
66
67// A "backedge" node is a block introduced in the CFG exclusively to indicate a
68// loop backedge. They are exactly identified by the presence of a non-null
69// pointer to the entry block of the loop condition. Note that this is not
70// necessarily the block with the loop statement as terminator, because
71// short-circuit operators will result in multiple blocks encoding the loop
72// condition, only one of which will contain the loop statement as terminator.
73static bool isBackedgeNode(const CFGBlock &B) {
74 return B.getLoopTarget() != nullptr;
75}
76
77namespace {
78
/// Extracts the condition expression from a block terminator statement.
/// Returns null for terminators that have no condition usable by the analysis.
class TerminatorVisitor
    : public ConstStmtVisitor<TerminatorVisitor, const Expr *> {
public:
  TerminatorVisitor() = default;

  // Branch and loop statements expose their condition directly.
  const Expr *VisitIfStmt(const IfStmt *If) { return If->getCond(); }
  const Expr *VisitWhileStmt(const WhileStmt *While) {
    return While->getCond();
  }
  const Expr *VisitDoStmt(const DoStmt *Do) { return Do->getCond(); }
  const Expr *VisitForStmt(const ForStmt *For) { return For->getCond(); }
  const Expr *VisitConditionalOperator(const ConditionalOperator *Cond) {
    return Cond->getCond();
  }

  // A short-circuit operator terminates its block after evaluating the LHS,
  // so the LHS is the controlling condition.
  const Expr *VisitBinaryOperator(const BinaryOperator *Op) {
    assert(Op->getOpcode() == BO_LAnd || Op->getOpcode() == BO_LOr);
    return Op->getLHS();
  }

  // Don't do anything special for CXXForRangeStmt, because the condition
  // (being implicitly generated) isn't visible from the loop body.
  const Expr *VisitCXXForRangeStmt(const CXXForRangeStmt *) { return nullptr; }
};
101
/// Holds data structures required for running dataflow analysis.
struct AnalysisContext {
  // The constructor/destructor pair brackets the whole analysis run in the
  // log: `beginAnalysis` here, `endAnalysis` in the destructor.
  // NOTE(review): `*...getOptions().Log` assumes the `Log` option is always
  // non-null (presumably a no-op logger is installed by default) — confirm
  // against `DataflowAnalysisContext::Options`.
  AnalysisContext(const AdornedCFG &ACFG, TypeErasedDataflowAnalysis &Analysis,
                  const Environment &InitEnv,
                  llvm::ArrayRef<std::optional<TypeErasedDataflowAnalysisState>>
                      BlockStates)
      : ACFG(ACFG), Analysis(Analysis), InitEnv(InitEnv),
        Log(*InitEnv.getDataflowAnalysisContext().getOptions().Log),
        BlockStates(BlockStates) {
    Log.beginAnalysis(ACFG, Analysis);
  }
  ~AnalysisContext() { Log.endAnalysis(); }

  /// Contains the CFG being analyzed.
  const AdornedCFG &ACFG;
  /// The analysis to be run.
  TypeErasedDataflowAnalysis &Analysis;
  /// Initial state to start the analysis.
  const Environment &InitEnv;
  /// Destination for analysis logging (a reference, so never null).
  Logger &Log;
  /// Stores the state of a CFG block if it has been evaluated by the analysis.
  /// The indices correspond to the block IDs.
  llvm::ArrayRef<std::optional<TypeErasedDataflowAnalysisState>> BlockStates;
};
126
127class PrettyStackTraceAnalysis : public llvm::PrettyStackTraceEntry {
128public:
129 PrettyStackTraceAnalysis(const AdornedCFG &ACFG, const char *Message)
130 : ACFG(ACFG), Message(Message) {}
131
132 void print(raw_ostream &OS) const override {
133 OS << Message << "\n";
134 OS << "Decl:\n";
135 ACFG.getDecl().dump(Out&: OS);
136 OS << "CFG:\n";
137 ACFG.getCFG().print(OS, LO: LangOptions(), ShowColors: false);
138 }
139
140private:
141 const AdornedCFG &ACFG;
142 const char *Message;
143};
144
145class PrettyStackTraceCFGElement : public llvm::PrettyStackTraceEntry {
146public:
147 PrettyStackTraceCFGElement(const CFGElement &Element, int BlockIdx,
148 int ElementIdx, const char *Message)
149 : Element(Element), BlockIdx(BlockIdx), ElementIdx(ElementIdx),
150 Message(Message) {}
151
152 void print(raw_ostream &OS) const override {
153 OS << Message << ": Element [B" << BlockIdx << "." << ElementIdx << "]\n";
154 if (auto Stmt = Element.getAs<CFGStmt>()) {
155 OS << "Stmt:\n";
156 ASTDumper Dumper(OS, false);
157 Dumper.Visit(Node: Stmt->getStmt());
158 }
159 }
160
161private:
162 const CFGElement &Element;
163 int BlockIdx;
164 int ElementIdx;
165 const char *Message;
166};
167
// Builds a joined TypeErasedDataflowAnalysisState from 0 or more sources,
// each of which may be owned (built as part of the join) or external (a
// reference to an Environment that will outlive the builder).
// Avoids unnecessary copies of the environment.
class JoinedStateBuilder {
  AnalysisContext &AC;
  Environment::ExprJoinBehavior JoinBehavior;
  // Views over all added states, owned and unowned alike, in insertion order.
  std::vector<const TypeErasedDataflowAnalysisState *> All;
  // Owned states. A `std::deque` (requires <deque>) so that pointers stored
  // in `All` remain valid as more states are appended.
  std::deque<TypeErasedDataflowAnalysisState> Owned;

  /// Joins two states pairwise: lattice elements via the analysis, and
  /// environments via `Environment::join` with the configured behavior.
  TypeErasedDataflowAnalysisState
  join(const TypeErasedDataflowAnalysisState &L,
       const TypeErasedDataflowAnalysisState &R) {
    return {AC.Analysis.joinTypeErased(L.Lattice, R.Lattice),
            Environment::join(L.Env, R.Env, AC.Analysis, JoinBehavior)};
  }

public:
  JoinedStateBuilder(AnalysisContext &AC,
                     Environment::ExprJoinBehavior JoinBehavior)
      : AC(AC), JoinBehavior(JoinBehavior) {}

  /// Adds a state the builder takes ownership of.
  void addOwned(TypeErasedDataflowAnalysisState State) {
    Owned.push_back(std::move(State));
    All.push_back(&Owned.back());
  }
  /// Adds a state by reference; `State` must outlive the builder.
  void addUnowned(const TypeErasedDataflowAnalysisState &State) {
    All.push_back(&State);
  }
  /// Consumes the builder and returns the join of all added states.
  /// With zero sources, produces the analysis' initial element over a fork of
  /// the initial environment.
  TypeErasedDataflowAnalysisState take() && {
    if (All.empty())
      // FIXME: Consider passing `Block` to Analysis.typeErasedInitialElement
      // to enable building analyses like computation of dominators that
      // initialize the state of each basic block differently.
      return {AC.Analysis.typeErasedInitialElement(), AC.InitEnv.fork()};
    if (All.size() == 1)
      // Join the environment with itself so that we discard expression state
      // if desired.
      // FIXME: We could consider writing special-case code for this that only
      // does the discarding, but it's not clear if this is worth it.
      return {All[0]->Lattice, Environment::join(All[0]->Env, All[0]->Env,
                                                 AC.Analysis, JoinBehavior)};

    auto Result = join(*All[0], *All[1]);
    for (unsigned I = 2; I < All.size(); ++I)
      Result = join(Result, *All[I]);
    return Result;
  }
};
217} // namespace
218
219static const Expr *getTerminatorCondition(const Stmt *TerminatorStmt) {
220 return TerminatorStmt == nullptr ? nullptr
221 : TerminatorVisitor().Visit(TerminatorStmt);
222}
223
/// Computes the input state for a given basic block by joining the output
/// states of its predecessors.
///
/// Requirements:
///
/// All predecessors of `Block` except those with loop back edges must have
/// already been transferred. States in `AC.BlockStates` that are set to
/// `std::nullopt` represent basic blocks that are not evaluated yet.
static TypeErasedDataflowAnalysisState
computeBlockInputState(const CFGBlock &Block, AnalysisContext &AC) {
  std::vector<const CFGBlock *> Preds(Block.pred_begin(), Block.pred_end());
  if (Block.getTerminator().isTemporaryDtorsBranch()) {
    // This handles a special case where the code that produced the CFG includes
    // a conditional operator with a branch that constructs a temporary and
    // calls a destructor annotated as noreturn. The CFG models this as follows:
    //
    // B1 (contains the condition of the conditional operator) - succs: B2, B3
    // B2 (contains code that does not call a noreturn destructor) - succs: B4
    // B3 (contains code that calls a noreturn destructor) - succs: B4
    // B4 (has temporary destructor terminator) - succs: B5, B6
    // B5 (noreturn block that is associated with the noreturn destructor call)
    // B6 (contains code that follows the conditional operator statement)
    //
    // The first successor (B5 above) of a basic block with a temporary
    // destructor terminator (B4 above) is the block that evaluates the
    // destructor. If that block has a noreturn element then the predecessor
    // block that constructed the temporary object (B3 above) is effectively a
    // noreturn block and its state should not be used as input for the state
    // of the block that has a temporary destructor terminator (B4 above). This
    // holds regardless of which branch of the ternary operator calls the
    // noreturn destructor. However, it doesn't handle cases where a nested
    // ternary operator includes a branch that contains a noreturn destructor
    // call.
    //
    // See `NoreturnDestructorTest` for concrete examples.
    if (Block.succ_begin()->getReachableBlock() != nullptr &&
        Block.succ_begin()->getReachableBlock()->hasNoReturnElement()) {
      // The terminator statement identifies the predecessor block that built
      // the temporary; drop it from the join set.
      const CFGBlock *StmtBlock = nullptr;
      if (const Stmt *Terminator = Block.getTerminatorStmt())
        StmtBlock = AC.ACFG.blockForStmt(*Terminator);
      assert(StmtBlock != nullptr);
      llvm::erase(Preds, StmtBlock);
    }
  }

  // If any of the predecessor blocks contains an expression consumed in a
  // different block, we need to keep expression state.
  // Note that in this case, we keep expression state for all predecessors,
  // rather than only those predecessors that actually contain an expression
  // consumed in a different block. While this is potentially suboptimal, it's
  // actually likely, if we have control flow within a full expression, that
  // all predecessors have expression state consumed in a different block.
  Environment::ExprJoinBehavior JoinBehavior = Environment::DiscardExprState;
  for (const CFGBlock *Pred : Preds) {
    if (Pred && AC.ACFG.containsExprConsumedInDifferentBlock(*Pred)) {
      JoinBehavior = Environment::KeepExprState;
      break;
    }
  }

  JoinedStateBuilder Builder(AC, JoinBehavior);
  for (const CFGBlock *Pred : Preds) {
    // Skip if the `Block` is unreachable or control flow cannot get past it.
    if (!Pred || Pred->hasNoReturnElement())
      continue;

    // Skip if `Pred` was not evaluated yet. This could happen if `Pred` has a
    // loop back edge to `Block`.
    const std::optional<TypeErasedDataflowAnalysisState> &MaybePredState =
        AC.BlockStates[Pred->getBlockID()];
    if (!MaybePredState)
      continue;

    const TypeErasedDataflowAnalysisState &PredState = *MaybePredState;
    // Without a terminator condition there is nothing branch-specific to
    // model, so the predecessor's state can be shared by reference.
    const Expr *Cond = getTerminatorCondition(Pred->getTerminatorStmt());
    if (Cond == nullptr) {
      Builder.addUnowned(PredState);
      continue;
    }

    // By convention, the first successor is the "true" branch.
    bool BranchVal = blockIndexInPredecessor(*Pred, Block) == 0;

    // `transferBranch` may need to mutate the environment to describe the
    // dynamic effect of the terminator for a given branch. Copy now.
    TypeErasedDataflowAnalysisState Copy = MaybePredState->fork();
    if (AC.Analysis.builtinOptions()) {
      auto *CondVal = Copy.Env.get<BoolValue>(*Cond);
      // In transferCFGBlock(), we ensure that we always have a `Value`
      // for the terminator condition, so assert this. We consciously
      // assert ourselves instead of asserting via `cast()` so that we get
      // a more meaningful line number if the assertion fails.
      assert(CondVal != nullptr);
      // Constrain the flow condition: the condition holds (or its negation
      // holds) on the branch being entered.
      BoolValue *AssertedVal =
          BranchVal ? CondVal : &Copy.Env.makeNot(*CondVal);
      Copy.Env.assume(AssertedVal->formula());
    }
    AC.Analysis.transferBranchTypeErased(BranchVal, Cond, Copy.Lattice,
                                         Copy.Env);
    Builder.addOwned(std::move(Copy));
  }
  return std::move(Builder).take();
}
325
/// Built-in transfer function for `CFGStmt`: applies the framework's model of
/// the statement's effect to `InputState.Env`.
static void
builtinTransferStatement(unsigned CurBlockID, const CFGStmt &Elt,
                         TypeErasedDataflowAnalysisState &InputState,
                         AnalysisContext &AC) {
  const Stmt *S = Elt.getStmt();
  assert(S != nullptr);
  // The map lets the transfer functions look up the environment of other
  // statements in already-evaluated blocks.
  StmtToEnvMap EnvMap(AC.ACFG, AC.BlockStates, CurBlockID, InputState);
  transfer(EnvMap, *S, InputState.Env, AC.Analysis);
}
336
/// Built-in transfer function for `CFGInitializer`: models the effect of a
/// constructor's member initializer on the object pointed to by `this`.
static void
builtinTransferInitializer(const CFGInitializer &Elt,
                           TypeErasedDataflowAnalysisState &InputState) {
  const CXXCtorInitializer *Init = Elt.getInitializer();
  assert(Init != nullptr);

  auto &Env = InputState.Env;
  auto &ThisLoc = *Env.getThisPointeeStorageLocation();

  if (!Init->isAnyMemberInitializer())
    // FIXME: Handle base initialization
    return;

  const Expr *InitExpr = Init->getInit();
  assert(InitExpr != nullptr);

  // Resolve the field being initialized and its storage location. For an
  // indirect member (a field inside an anonymous struct/union), walk the
  // chain of fields down from `this`.
  const FieldDecl *Field = nullptr;
  RecordStorageLocation *Parent = &ThisLoc;
  StorageLocation *FieldLoc = nullptr;
  if (Init->isMemberInitializer()) {
    Field = Init->getMember();
    FieldLoc = ThisLoc.getChild(*Field);
  } else {
    IndirectFieldDecl *IndirectField = Init->getIndirectMember();
    assert(IndirectField != nullptr);
    FieldLoc = &ThisLoc;
    for (const auto *Segment : IndirectField->chain()) {
      Field = cast<FieldDecl>(Segment);
      Parent = cast<RecordStorageLocation>(FieldLoc);
      FieldLoc = Parent->getChild(*Field);
    }
  }
  assert(Field != nullptr);

  // FIXME: Instead of these case distinctions, we would ideally want to be able
  // to simply use `Environment::createObject()` here, the same way that we do
  // this in `TransferVisitor::VisitInitListExpr()`. However, this would require
  // us to be able to build a list of fields that we then use to initialize an
  // `RecordStorageLocation` -- and the problem is that, when we get here,
  // the `RecordStorageLocation` already exists. We should explore if there's
  // anything that we can do to change this.
  if (Field->getType()->isReferenceType()) {
    // Reference members are modeled by pointing the field at the storage
    // location of the initializing expression, if one is available.
    StorageLocation *InitExprLoc = Env.getStorageLocation(*InitExpr);
    if (InitExprLoc == nullptr)
      return;

    Parent->setChild(*Field, InitExprLoc);
  } else if (!Field->getType()->isRecordType()) {
    // Record-type initializers construct themselves directly into the result
    // object, so there is no need to handle them here; only scalar values are
    // copied into the field's location.
    assert(FieldLoc != nullptr);
    if (auto *InitExprVal = Env.getValue(*InitExpr))
      Env.setValue(*FieldLoc, *InitExprVal);
  }
}
393
394static void builtinTransfer(unsigned CurBlockID, const CFGElement &Elt,
395 TypeErasedDataflowAnalysisState &State,
396 AnalysisContext &AC) {
397 switch (Elt.getKind()) {
398 case CFGElement::Statement:
399 builtinTransferStatement(CurBlockID, Elt: Elt.castAs<CFGStmt>(), InputState&: State, AC);
400 break;
401 case CFGElement::Initializer:
402 builtinTransferInitializer(Elt: Elt.castAs<CFGInitializer>(), InputState&: State);
403 break;
404 case CFGElement::LifetimeEnds:
405 // Removing declarations when their lifetime ends serves two purposes:
406 // - Eliminate unnecessary clutter from `Environment::DeclToLoc`
407 // - Allow us to assert that, when joining two `Environment`s, the two
408 // `DeclToLoc` maps never contain entries that map the same declaration to
409 // different storage locations.
410 if (const ValueDecl *VD = Elt.castAs<CFGLifetimeEnds>().getVarDecl())
411 State.Env.removeDecl(D: *VD);
412 break;
413 default:
414 // FIXME: Evaluate other kinds of `CFGElement`
415 break;
416 }
417}
418
/// Transfers `State` by evaluating each element in the `Block` based on the
/// `AC.Analysis` specified.
///
/// Built-in transfer functions (if the option for `ApplyBuiltinTransfer` is set
/// by the analysis) will be applied to the element before evaluation by the
/// user-specified analysis.
/// `PostAnalysisCallbacks` (if provided) will be invoked before and/or after
/// each element is evaluated by the user-specified analysis.
static TypeErasedDataflowAnalysisState
transferCFGBlock(const CFGBlock &Block, AnalysisContext &AC,
                 const CFGEltCallbacksTypeErased &PostAnalysisCallbacks = {}) {
  AC.Log.enterBlock(Block, PostAnalysisCallbacks.Before != nullptr ||
                               PostAnalysisCallbacks.After != nullptr);
  // Start from the join of the predecessors' output states.
  auto State = computeBlockInputState(Block, AC);
  AC.Log.recordState(State);
  // Element indices start at 1 — presumably to match CFG dump numbering; the
  // index is only used for crash diagnostics.
  int ElementIdx = 1;
  for (const auto &Element : Block) {
    PrettyStackTraceCFGElement CrashInfo(Element, Block.getBlockID(),
                                         ElementIdx++, "transferCFGBlock");

    AC.Log.enterElement(Element);

    if (PostAnalysisCallbacks.Before) {
      PostAnalysisCallbacks.Before(Element, State);
    }

    // Built-in analysis
    if (AC.Analysis.builtinOptions()) {
      builtinTransfer(Block.getBlockID(), Element, State, AC);
    }

    // User-provided analysis
    AC.Analysis.transferTypeErased(Element, State.Lattice, State.Env);

    if (PostAnalysisCallbacks.After) {
      PostAnalysisCallbacks.After(Element, State);
    }

    AC.Log.recordState(State);
  }

  // If we have a terminator, evaluate its condition.
  // This `Expr` may not appear as a `CFGElement` anywhere else, and it's
  // important that we evaluate it here (rather than while processing the
  // terminator) so that we put the corresponding value in the right
  // environment.
  if (const Expr *TerminatorCond =
          dyn_cast_or_null<Expr>(Block.getTerminatorCondition())) {
    if (State.Env.getValue(*TerminatorCond) == nullptr)
      // FIXME: This only runs the builtin transfer, not the analysis-specific
      // transfer. Fixing this isn't trivial, as the analysis-specific transfer
      // takes a `CFGElement` as input, but some expressions only show up as a
      // terminator condition, but not as a `CFGElement`. The condition of an if
      // statement is one such example.
      transfer(StmtToEnvMap(AC.ACFG, AC.BlockStates, Block.getBlockID(), State),
               *TerminatorCond, State.Env, AC.Analysis);

    // If the transfer function didn't produce a value, create an atom so that
    // we have *some* value for the condition expression. This ensures that
    // when we extend the flow condition, it actually changes.
    if (State.Env.getValue(*TerminatorCond) == nullptr)
      State.Env.setValue(*TerminatorCond, State.Env.makeAtomicBoolValue());
    AC.Log.recordState(State);
  }

  return State;
}
486
/// Runs `Analysis` over `ACFG` with a forward worklist algorithm until a
/// fixpoint is reached (or `MaxBlockVisits` is exceeded, in which case an
/// error is returned). Returns one output state per CFG block, indexed by
/// block ID; `std::nullopt` entries are blocks that were never reached.
llvm::Expected<std::vector<std::optional<TypeErasedDataflowAnalysisState>>>
runTypeErasedDataflowAnalysis(
    const AdornedCFG &ACFG, TypeErasedDataflowAnalysis &Analysis,
    const Environment &InitEnv,
    const CFGEltCallbacksTypeErased &PostAnalysisCallbacks,
    std::int32_t MaxBlockVisits) {
  PrettyStackTraceAnalysis CrashInfo(ACFG, "runTypeErasedDataflowAnalysis");

  // If the caller passed the top-level (non-nested) environment, initialize a
  // forked copy for this analysis run; otherwise analyze with `InitEnv`
  // directly. NOTE(review): `callStackSize() == 0` presumably distinguishes a
  // fresh environment from one set up for a nested/called analysis — confirm.
  std::optional<Environment> MaybeStartingEnv;
  if (InitEnv.callStackSize() == 0) {
    MaybeStartingEnv = InitEnv.fork();
    MaybeStartingEnv->initialize();
  }
  const Environment &StartingEnv =
      MaybeStartingEnv ? *MaybeStartingEnv : InitEnv;

  const clang::CFG &CFG = ACFG.getCFG();
  PostOrderCFGView POV(&CFG);
  ForwardDataflowWorklist Worklist(CFG, &POV);

  std::vector<std::optional<TypeErasedDataflowAnalysisState>> BlockStates(
      CFG.size());

  // The entry basic block doesn't contain statements so it can be skipped.
  const CFGBlock &Entry = CFG.getEntry();
  BlockStates[Entry.getBlockID()] = {Analysis.typeErasedInitialElement(),
                                     StartingEnv.fork()};
  Worklist.enqueueSuccessors(&Entry);

  AnalysisContext AC(ACFG, Analysis, StartingEnv, BlockStates);
  std::int32_t BlockVisits = 0;
  while (const CFGBlock *Block = Worklist.dequeue()) {
    LLVM_DEBUG(llvm::dbgs()
               << "Processing Block " << Block->getBlockID() << "\n");
    // Bail out rather than loop forever on analyses that fail to converge.
    if (++BlockVisits > MaxBlockVisits) {
      return llvm::createStringError(std::errc::timed_out,
                                     "maximum number of blocks processed");
    }

    const std::optional<TypeErasedDataflowAnalysisState> &OldBlockState =
        BlockStates[Block->getBlockID()];
    TypeErasedDataflowAnalysisState NewBlockState =
        transferCFGBlock(*Block, AC);
    LLVM_DEBUG({
      llvm::errs() << "New Env:\n";
      NewBlockState.Env.dump();
    });

    if (OldBlockState) {
      LLVM_DEBUG({
        llvm::errs() << "Old Env:\n";
        OldBlockState->Env.dump();
      });
      if (isBackedgeNode(*Block)) {
        // At loop heads, widen instead of comparing for equality, to
        // guarantee termination of the fixpoint iteration.
        LatticeJoinEffect Effect1 = Analysis.widenTypeErased(
            NewBlockState.Lattice, OldBlockState->Lattice);
        LatticeJoinEffect Effect2 =
            NewBlockState.Env.widen(OldBlockState->Env, Analysis);
        if (Effect1 == LatticeJoinEffect::Unchanged &&
            Effect2 == LatticeJoinEffect::Unchanged) {
          // The state of `Block` didn't change from widening so there's no need
          // to revisit its successors.
          AC.Log.blockConverged();
          continue;
        }
      } else if (Analysis.isEqualTypeErased(OldBlockState->Lattice,
                                            NewBlockState.Lattice) &&
                 OldBlockState->Env.equivalentTo(NewBlockState.Env, Analysis)) {
        // The state of `Block` didn't change after transfer so there's no need
        // to revisit its successors.
        AC.Log.blockConverged();
        continue;
      }
    }

    BlockStates[Block->getBlockID()] = std::move(NewBlockState);

    // Do not add unreachable successor blocks to `Worklist`.
    if (Block->hasNoReturnElement())
      continue;

    Worklist.enqueueSuccessors(Block);
  }
  // FIXME: Consider evaluating unreachable basic blocks (those that have a
  // state set to `std::nullopt` at this point) to also analyze dead code.

  // Replay each evaluated block once more so the caller's callbacks observe
  // the fixpoint states.
  if (PostAnalysisCallbacks.Before || PostAnalysisCallbacks.After) {
    for (const CFGBlock *Block : ACFG.getCFG()) {
      // Skip blocks that were not evaluated.
      if (!BlockStates[Block->getBlockID()])
        continue;
      transferCFGBlock(*Block, AC, PostAnalysisCallbacks);
    }
  }

  return std::move(BlockStates);
}
584
585} // namespace dataflow
586} // namespace clang
587
