1//=== AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis ------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines analysis_warnings::[Policy,Executor].
10// Together they are used by Sema to issue warnings based on inexpensive
11// static analysis algorithms in libAnalysis.
12//
13//===----------------------------------------------------------------------===//
14
15#include "clang/Sema/AnalysisBasedWarnings.h"
16#include "clang/AST/Decl.h"
17#include "clang/AST/DeclCXX.h"
18#include "clang/AST/DeclObjC.h"
19#include "clang/AST/DynamicRecursiveASTVisitor.h"
20#include "clang/AST/EvaluatedExprVisitor.h"
21#include "clang/AST/Expr.h"
22#include "clang/AST/ExprCXX.h"
23#include "clang/AST/ExprObjC.h"
24#include "clang/AST/OperationKinds.h"
25#include "clang/AST/ParentMap.h"
26#include "clang/AST/StmtCXX.h"
27#include "clang/AST/StmtObjC.h"
28#include "clang/AST/Type.h"
29#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
30#include "clang/Analysis/Analyses/CalledOnceCheck.h"
31#include "clang/Analysis/Analyses/Consumed.h"
32#include "clang/Analysis/Analyses/ReachableCode.h"
33#include "clang/Analysis/Analyses/ThreadSafety.h"
34#include "clang/Analysis/Analyses/UninitializedValues.h"
35#include "clang/Analysis/Analyses/UnsafeBufferUsage.h"
36#include "clang/Analysis/AnalysisDeclContext.h"
37#include "clang/Analysis/CFG.h"
38#include "clang/Analysis/CFGStmtMap.h"
39#include "clang/Basic/Diagnostic.h"
40#include "clang/Basic/DiagnosticSema.h"
41#include "clang/Basic/SourceLocation.h"
42#include "clang/Basic/SourceManager.h"
43#include "clang/Lex/Preprocessor.h"
44#include "clang/Sema/ScopeInfo.h"
45#include "clang/Sema/SemaInternal.h"
46#include "llvm/ADT/ArrayRef.h"
47#include "llvm/ADT/BitVector.h"
48#include "llvm/ADT/MapVector.h"
49#include "llvm/ADT/STLFunctionalExtras.h"
50#include "llvm/ADT/SmallVector.h"
51#include "llvm/ADT/StringRef.h"
52#include <algorithm>
53#include <deque>
54#include <iterator>
55#include <optional>
56
57using namespace clang;
58
59//===----------------------------------------------------------------------===//
60// Unreachable code analysis.
61//===----------------------------------------------------------------------===//
62
63namespace {
64 class UnreachableCodeHandler : public reachable_code::Callback {
65 Sema &S;
66 SourceRange PreviousSilenceableCondVal;
67
68 public:
69 UnreachableCodeHandler(Sema &s) : S(s) {}
70
71 void HandleUnreachable(reachable_code::UnreachableKind UK, SourceLocation L,
72 SourceRange SilenceableCondVal, SourceRange R1,
73 SourceRange R2, bool HasFallThroughAttr) override {
74 // If the diagnosed code is `[[fallthrough]];` and
75 // `-Wunreachable-code-fallthrough` is enabled, suppress `code will never
76 // be executed` warning to avoid generating diagnostic twice
77 if (HasFallThroughAttr &&
78 !S.getDiagnostics().isIgnored(diag::warn_unreachable_fallthrough_attr,
79 SourceLocation()))
80 return;
81
82 // Avoid reporting multiple unreachable code diagnostics that are
83 // triggered by the same conditional value.
84 if (PreviousSilenceableCondVal.isValid() &&
85 SilenceableCondVal.isValid() &&
86 PreviousSilenceableCondVal == SilenceableCondVal)
87 return;
88 PreviousSilenceableCondVal = SilenceableCondVal;
89
90 unsigned diag = diag::warn_unreachable;
91 switch (UK) {
92 case reachable_code::UK_Break:
93 diag = diag::warn_unreachable_break;
94 break;
95 case reachable_code::UK_Return:
96 diag = diag::warn_unreachable_return;
97 break;
98 case reachable_code::UK_Loop_Increment:
99 diag = diag::warn_unreachable_loop_increment;
100 break;
101 case reachable_code::UK_Other:
102 break;
103 }
104
105 S.Diag(L, diag) << R1 << R2;
106
107 SourceLocation Open = SilenceableCondVal.getBegin();
108 if (Open.isValid()) {
109 SourceLocation Close = SilenceableCondVal.getEnd();
110 Close = S.getLocForEndOfToken(Loc: Close);
111 if (Close.isValid()) {
112 S.Diag(Open, diag::note_unreachable_silence)
113 << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
114 << FixItHint::CreateInsertion(Close, ")");
115 }
116 }
117 }
118 };
119} // anonymous namespace
120
121/// CheckUnreachable - Check for unreachable code.
122static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
123 // As a heuristic prune all diagnostics not in the main file. Currently
124 // the majority of warnings in headers are false positives. These
125 // are largely caused by configuration state, e.g. preprocessor
126 // defined code, etc.
127 //
128 // Note that this is also a performance optimization. Analyzing
129 // headers many times can be expensive.
130 if (!S.getSourceManager().isInMainFile(Loc: AC.getDecl()->getBeginLoc()))
131 return;
132
133 UnreachableCodeHandler UC(S);
134 reachable_code::FindUnreachableCode(AC, PP&: S.getPreprocessor(), CB&: UC);
135}
136
137namespace {
138/// Warn on logical operator errors in CFGBuilder
139class LogicalErrorHandler : public CFGCallback {
140 Sema &S;
141
142public:
143 LogicalErrorHandler(Sema &S) : S(S) {}
144
145 static bool HasMacroID(const Expr *E) {
146 if (E->getExprLoc().isMacroID())
147 return true;
148
149 // Recurse to children.
150 for (const Stmt *SubStmt : E->children())
151 if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
152 if (HasMacroID(SubExpr))
153 return true;
154
155 return false;
156 }
157
158 void logicAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
159 if (HasMacroID(B))
160 return;
161
162 unsigned DiagID = isAlwaysTrue
163 ? diag::warn_tautological_negation_or_compare
164 : diag::warn_tautological_negation_and_compare;
165 SourceRange DiagRange = B->getSourceRange();
166 S.Diag(B->getExprLoc(), DiagID) << DiagRange;
167 }
168
169 void compareAlwaysTrue(const BinaryOperator *B,
170 bool isAlwaysTrueOrFalse) override {
171 if (HasMacroID(B))
172 return;
173
174 SourceRange DiagRange = B->getSourceRange();
175 S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
176 << DiagRange << isAlwaysTrueOrFalse;
177 }
178
179 void compareBitwiseEquality(const BinaryOperator *B,
180 bool isAlwaysTrue) override {
181 if (HasMacroID(B))
182 return;
183
184 SourceRange DiagRange = B->getSourceRange();
185 S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
186 << DiagRange << isAlwaysTrue;
187 }
188
189 void compareBitwiseOr(const BinaryOperator *B) override {
190 if (HasMacroID(B))
191 return;
192
193 SourceRange DiagRange = B->getSourceRange();
194 S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_or) << DiagRange;
195 }
196
197 static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
198 SourceLocation Loc) {
199 return !Diags.isIgnored(diag::warn_tautological_overlap_comparison, Loc) ||
200 !Diags.isIgnored(diag::warn_comparison_bitwise_or, Loc) ||
201 !Diags.isIgnored(diag::warn_tautological_negation_and_compare, Loc);
202 }
203};
204} // anonymous namespace
205
206//===----------------------------------------------------------------------===//
207// Check for infinite self-recursion in functions
208//===----------------------------------------------------------------------===//
209
210// Returns true if the function is called anywhere within the CFGBlock.
211// For member functions, the additional condition of being call from the
212// this pointer is required.
213static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
214 // Process all the Stmt's in this block to find any calls to FD.
215 for (const auto &B : Block) {
216 if (B.getKind() != CFGElement::Statement)
217 continue;
218
219 const CallExpr *CE = dyn_cast<CallExpr>(Val: B.getAs<CFGStmt>()->getStmt());
220 if (!CE || !CE->getCalleeDecl() ||
221 CE->getCalleeDecl()->getCanonicalDecl() != FD)
222 continue;
223
224 // Skip function calls which are qualified with a templated class.
225 if (const DeclRefExpr *DRE =
226 dyn_cast<DeclRefExpr>(Val: CE->getCallee()->IgnoreParenImpCasts())) {
227 if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
228 if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
229 isa<TemplateSpecializationType>(Val: NNS->getAsType())) {
230 continue;
231 }
232 }
233 }
234
235 const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(Val: CE);
236 if (!MCE || isa<CXXThisExpr>(Val: MCE->getImplicitObjectArgument()) ||
237 !MCE->getMethodDecl()->isVirtual())
238 return true;
239 }
240 return false;
241}
242
243// Returns true if every path from the entry block passes through a call to FD.
244static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
245 llvm::SmallPtrSet<CFGBlock *, 16> Visited;
246 llvm::SmallVector<CFGBlock *, 16> WorkList;
247 // Keep track of whether we found at least one recursive path.
248 bool foundRecursion = false;
249
250 const unsigned ExitID = cfg->getExit().getBlockID();
251
252 // Seed the work list with the entry block.
253 WorkList.push_back(Elt: &cfg->getEntry());
254
255 while (!WorkList.empty()) {
256 CFGBlock *Block = WorkList.pop_back_val();
257
258 for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
259 if (CFGBlock *SuccBlock = *I) {
260 if (!Visited.insert(Ptr: SuccBlock).second)
261 continue;
262
263 // Found a path to the exit node without a recursive call.
264 if (ExitID == SuccBlock->getBlockID())
265 return false;
266
267 // If the successor block contains a recursive call, end analysis there.
268 if (hasRecursiveCallInPath(FD, Block&: *SuccBlock)) {
269 foundRecursion = true;
270 continue;
271 }
272
273 WorkList.push_back(Elt: SuccBlock);
274 }
275 }
276 }
277 return foundRecursion;
278}
279
280static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
281 const Stmt *Body, AnalysisDeclContext &AC) {
282 FD = FD->getCanonicalDecl();
283
284 // Only run on non-templated functions and non-templated members of
285 // templated classes.
286 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
287 FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
288 return;
289
290 CFG *cfg = AC.getCFG();
291 if (!cfg) return;
292
293 // If the exit block is unreachable, skip processing the function.
294 if (cfg->getExit().pred_empty())
295 return;
296
297 // Emit diagnostic if a recursive function call is detected for all paths.
298 if (checkForRecursiveFunctionCall(FD, cfg))
299 S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
300}
301
302//===----------------------------------------------------------------------===//
303// Check for throw in a non-throwing function.
304//===----------------------------------------------------------------------===//
305
/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
/// can reach ExitBlock.
///
/// Performs a depth-first search over the CFG's unwind successors: reaching
/// the exit block means the exception escapes the function; hitting a catch
/// handler that can catch this exception stops propagation along that path.
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
                         CFG *Body) {
  // Worklist of blocks still to unwind through; Queued prevents revisits.
  SmallVector<CFGBlock *, 16> Stack;
  llvm::BitVector Queued(Body->getNumBlockIDs());

  Stack.push_back(Elt: &ThrowBlock);
  Queued[ThrowBlock.getBlockID()] = true;

  while (!Stack.empty()) {
    CFGBlock &UnwindBlock = *Stack.pop_back_val();

    for (auto &Succ : UnwindBlock.succs()) {
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
        continue;

      // Unwinding reached the function exit: the exception escapes.
      if (Succ->getBlockID() == Body->getExit().getBlockID())
        return true;

      if (auto *Catch =
              dyn_cast_or_null<CXXCatchStmt>(Val: Succ->getLabel())) {
        QualType Caught = Catch->getCaughtType();
        if (Caught.isNull() || // catch (...) catches everything
            !E->getSubExpr() || // throw; is considered caught by any handler
            S.handlerCanCatch(HandlerType: Caught, ExceptionType: E->getSubExpr()->getType()))
          // Exception doesn't escape via this path: stop exploring the
          // successors of this block (later handlers would be shadowed).
          break;
      } else {
        // Not a handler: keep unwinding through this successor.
        Stack.push_back(Elt: Succ);
        Queued[Succ->getBlockID()] = true;
      }
    }
  }

  return false;
}
343
344static void visitReachableThrows(
345 CFG *BodyCFG,
346 llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
347 llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
348 clang::reachable_code::ScanReachableFromBlock(Start: &BodyCFG->getEntry(), Reachable);
349 for (CFGBlock *B : *BodyCFG) {
350 if (!Reachable[B->getBlockID()])
351 continue;
352 for (CFGElement &E : *B) {
353 std::optional<CFGStmt> S = E.getAs<CFGStmt>();
354 if (!S)
355 continue;
356 if (auto *Throw = dyn_cast<CXXThrowExpr>(Val: S->getStmt()))
357 Visit(Throw, *B);
358 }
359 }
360}
361
362static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
363 const FunctionDecl *FD) {
364 if (!S.getSourceManager().isInSystemHeader(Loc: OpLoc) &&
365 FD->getTypeSourceInfo()) {
366 S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
367 if (S.getLangOpts().CPlusPlus11 &&
368 (isa<CXXDestructorDecl>(Val: FD) ||
369 FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
370 FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
371 if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
372 getAs<FunctionProtoType>())
373 S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
374 << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
375 << FD->getExceptionSpecSourceRange();
376 } else
377 S.Diag(FD->getLocation(), diag::note_throw_in_function)
378 << FD->getExceptionSpecSourceRange();
379 }
380}
381
382static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
383 AnalysisDeclContext &AC) {
384 CFG *BodyCFG = AC.getCFG();
385 if (!BodyCFG)
386 return;
387 if (BodyCFG->getExit().pred_empty())
388 return;
389 visitReachableThrows(BodyCFG, Visit: [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
390 if (throwEscapes(S, E: Throw, ThrowBlock&: Block, Body: BodyCFG))
391 EmitDiagForCXXThrowInNonThrowingFunc(S, OpLoc: Throw->getThrowLoc(), FD);
392 });
393}
394
395static bool isNoexcept(const FunctionDecl *FD) {
396 const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
397 if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
398 return true;
399 return false;
400}
401
402//===----------------------------------------------------------------------===//
403// Check for missing return value.
404//===----------------------------------------------------------------------===//
405
/// Result of the fall-off-the-end analysis performed by CheckFallThrough.
enum ControlFlowKind {
  UnknownFallThrough,      // No CFG available; nothing can be concluded.
  NeverFallThrough,        // Never falls off the end, but may return.
  MaybeFallThrough,        // Might or might not fall off the end.
  AlwaysFallThrough,       // Always falls off the end of the statement.
  NeverFallThroughOrReturn // Never falls off the end and never returns.
};
413
/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (!cfg) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(Start: &cfg->getEntry(),
                                                          Reachable&: live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (const auto *B : *cfg) {
      if (!live[B->getBlockID()]) {
        if (B->pred_begin() == B->pred_end()) {
          const Stmt *Term = B->getTerminatorStmt();
          if (isa_and_nonnull<CXXTryStmt>(Val: Term))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(Start: B, Reachable&: live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit block
  // and look for fall through paths, being careful to ignore normal returns,
  // and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator I =
           cfg->getExit().filtered_pred_start_end(f: FO);
       I.hasMore(); ++I) {
    const CFGBlock &B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG. This is
    // normal. We need to look pass the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    // Find the last statement-like element (skipping destructor elements).
    for ( ; ri != re ; ++ri)
      if (ri->getAs<CFGStmt>())
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      const Stmt *Term = B.getTerminatorStmt();
      if (Term && (isa<CXXTryStmt>(Val: Term) || isa<ObjCAtTryStmt>(Val: Term))) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    // Classify the edge by the kind of the block's last statement.
    CFGStmt CS = ri->castAs<CFGStmt>();
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(Val: S) || isa<CoreturnStmt>(Val: S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(Val: S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(Val: S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(Val: S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(Val: S)) {
      HasAbnormalEdge = true;
      continue;
    }
    if (!llvm::is_contained(Range: B.succs(), Element: &cfg->getExit())) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }
  // No plain edge into the exit block means we cannot simply fall off the end.
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return. If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
543
544namespace {
545
546struct CheckFallThroughDiagnostics {
547 unsigned diag_FallThrough_HasNoReturn = 0;
548 unsigned diag_FallThrough_ReturnsNonVoid = 0;
549 unsigned diag_NeverFallThroughOrReturn = 0;
550 unsigned FunKind; // TODO: use diag::FalloffFunctionKind
551 SourceLocation FuncLoc;
552
553 static CheckFallThroughDiagnostics MakeForFunction(Sema &S,
554 const Decl *Func) {
555 CheckFallThroughDiagnostics D;
556 D.FuncLoc = Func->getLocation();
557 D.diag_FallThrough_HasNoReturn = diag::warn_noreturn_has_return_expr;
558 D.diag_FallThrough_ReturnsNonVoid = diag::warn_falloff_nonvoid;
559
560 // Don't suggest that virtual functions be marked "noreturn", since they
561 // might be overridden by non-noreturn functions.
562 bool isVirtualMethod = false;
563 if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Val: Func))
564 isVirtualMethod = Method->isVirtual();
565
566 // Don't suggest that template instantiations be marked "noreturn"
567 bool isTemplateInstantiation = false;
568 if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Val: Func)) {
569 isTemplateInstantiation = Function->isTemplateInstantiation();
570 if (!S.getLangOpts().CPlusPlus && !S.getLangOpts().C99 &&
571 Function->isMain()) {
572 D.diag_FallThrough_ReturnsNonVoid = diag::ext_main_no_return;
573 }
574 }
575
576 if (!isVirtualMethod && !isTemplateInstantiation)
577 D.diag_NeverFallThroughOrReturn = diag::warn_suggest_noreturn_function;
578
579 D.FunKind = diag::FalloffFunctionKind::Function;
580 return D;
581 }
582
583 static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
584 CheckFallThroughDiagnostics D;
585 D.FuncLoc = Func->getLocation();
586 D.diag_FallThrough_ReturnsNonVoid = diag::warn_falloff_nonvoid;
587 D.FunKind = diag::FalloffFunctionKind::Coroutine;
588 return D;
589 }
590
591 static CheckFallThroughDiagnostics MakeForBlock() {
592 CheckFallThroughDiagnostics D;
593 D.diag_FallThrough_HasNoReturn = diag::err_noreturn_has_return_expr;
594 D.diag_FallThrough_ReturnsNonVoid = diag::err_falloff_nonvoid;
595 D.FunKind = diag::FalloffFunctionKind::Block;
596 return D;
597 }
598
599 static CheckFallThroughDiagnostics MakeForLambda() {
600 CheckFallThroughDiagnostics D;
601 D.diag_FallThrough_HasNoReturn = diag::err_noreturn_has_return_expr;
602 D.diag_FallThrough_ReturnsNonVoid = diag::warn_falloff_nonvoid;
603 D.FunKind = diag::FalloffFunctionKind::Lambda;
604 return D;
605 }
606
607 bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
608 bool HasNoReturn) const {
609 if (FunKind == diag::FalloffFunctionKind::Function) {
610 return (ReturnsVoid ||
611 D.isIgnored(diag::warn_falloff_nonvoid, FuncLoc)) &&
612 (!HasNoReturn ||
613 D.isIgnored(diag::warn_noreturn_has_return_expr, FuncLoc)) &&
614 (!ReturnsVoid ||
615 D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc));
616 }
617 if (FunKind == diag::FalloffFunctionKind::Coroutine) {
618 return (ReturnsVoid ||
619 D.isIgnored(diag::warn_falloff_nonvoid, FuncLoc)) &&
620 (!HasNoReturn);
621 }
622 // For blocks / lambdas.
623 return ReturnsVoid && !HasNoReturn;
624 }
625};
626
627} // anonymous namespace
628
/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
///
/// \param D      The function, Objective-C method, or block being checked.
/// \param Body   Its body statement; diagnostics are placed at its braces.
/// \param BlockType  For blocks, the block pointer type (used to recover the
///                   return type and noreturn-ness); ignored otherwise.
/// \param CD     Diagnostic IDs appropriate for this kind of entity.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    QualType BlockType,
                                    const CheckFallThroughDiagnostics &CD,
                                    AnalysisDeclContext &AC) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  if (const auto *FD = dyn_cast<FunctionDecl>(Val: D)) {
    // A coroutine counts as "returning void" when it has a fallthrough
    // handler (co_return; at the end is implicit).
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Val: Body))
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
    else
      ReturnsVoid = FD->getReturnType()->isVoidType();
    HasNoReturn = FD->isNoReturn();
  }
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(Val: D)) {
    ReturnsVoid = MD->getReturnType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(Val: D)) {
    // For blocks, the return type and noreturn attribute live on the
    // function type behind the block pointer.
    if (const FunctionType *FT =
          BlockType->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getReturnType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(D&: Diags, ReturnsVoid, HasNoReturn))
      return;
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();

  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
  if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
    return;

  // Either in a function body compound statement, or a function-try-block.
  switch (int FallThroughType = CheckFallThrough(AC)) {
    case UnknownFallThrough:
      break;

    case MaybeFallThrough:
    case AlwaysFallThrough:
      // Falling off the end of a noreturn entity, or of a non-void one,
      // each gets its own diagnostic (if configured for this entity kind).
      if (HasNoReturn) {
        if (CD.diag_FallThrough_HasNoReturn)
          S.Diag(RBrace, CD.diag_FallThrough_HasNoReturn) << CD.FunKind;
      } else if (!ReturnsVoid && CD.diag_FallThrough_ReturnsNonVoid) {
        bool NotInAllControlPaths = FallThroughType == MaybeFallThrough;
        S.Diag(RBrace, CD.diag_FallThrough_ReturnsNonVoid)
            << CD.FunKind << NotInAllControlPaths;
      }
      break;
    case NeverFallThroughOrReturn:
      // Entity never returns at all: suggest marking it noreturn.
      if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
        if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: D)) {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
        } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(Val: D)) {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
        } else {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
        }
      }
      break;
    case NeverFallThrough:
      break;
  }
}
704
705//===----------------------------------------------------------------------===//
706// -Wuninitialized
707//===----------------------------------------------------------------------===//
708
709namespace {
710/// ContainsReference - A visitor class to search for references to
711/// a particular declaration (the needle) within any evaluated component of an
712/// expression (recursively).
713class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
714 bool FoundReference;
715 const DeclRefExpr *Needle;
716
717public:
718 typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
719
720 ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
721 : Inherited(Context), FoundReference(false), Needle(Needle) {}
722
723 void VisitExpr(const Expr *E) {
724 // Stop evaluating if we already have a reference.
725 if (FoundReference)
726 return;
727
728 Inherited::VisitExpr(E);
729 }
730
731 void VisitDeclRefExpr(const DeclRefExpr *E) {
732 if (E == Needle)
733 FoundReference = true;
734 else
735 Inherited::VisitDeclRefExpr(E);
736 }
737
738 bool doesContainReference() const { return FoundReference; }
739};
740} // anonymous namespace
741
742static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
743 QualType VariableTy = VD->getType().getCanonicalType();
744 if (VariableTy->isBlockPointerType() &&
745 !VD->hasAttr<BlocksAttr>()) {
746 S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
747 << VD->getDeclName()
748 << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
749 return true;
750 }
751
752 // Don't issue a fixit if there is already an initializer.
753 if (VD->getInit())
754 return false;
755
756 // Don't suggest a fixit inside macros.
757 if (VD->getEndLoc().isMacroID())
758 return false;
759
760 SourceLocation Loc = S.getLocForEndOfToken(Loc: VD->getEndLoc());
761
762 // Suggest possible initialization (if any).
763 std::string Init = S.getFixItZeroInitializerForType(T: VariableTy, Loc);
764 if (Init.empty())
765 return false;
766
767 S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
768 << FixItHint::CreateInsertion(Loc, Init);
769 return true;
770}
771
772/// Create a fixit to remove an if-like statement, on the assumption that its
773/// condition is CondVal.
774static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
775 const Stmt *Else, bool CondVal,
776 FixItHint &Fixit1, FixItHint &Fixit2) {
777 if (CondVal) {
778 // If condition is always true, remove all but the 'then'.
779 Fixit1 = FixItHint::CreateRemoval(
780 RemoveRange: CharSourceRange::getCharRange(B: If->getBeginLoc(), E: Then->getBeginLoc()));
781 if (Else) {
782 SourceLocation ElseKwLoc = S.getLocForEndOfToken(Loc: Then->getEndLoc());
783 Fixit2 =
784 FixItHint::CreateRemoval(RemoveRange: SourceRange(ElseKwLoc, Else->getEndLoc()));
785 }
786 } else {
787 // If condition is always false, remove all but the 'else'.
788 if (Else)
789 Fixit1 = FixItHint::CreateRemoval(RemoveRange: CharSourceRange::getCharRange(
790 B: If->getBeginLoc(), E: Else->getBeginLoc()));
791 else
792 Fixit1 = FixItHint::CreateRemoval(RemoveRange: If->getSourceRange());
793 }
794}
795
796/// DiagUninitUse -- Helper function to produce a diagnostic for an
797/// uninitialized use of a variable.
798static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
799 bool IsCapturedByBlock) {
800 bool Diagnosed = false;
801
802 switch (Use.getKind()) {
803 case UninitUse::Always:
804 S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
805 << VD->getDeclName() << IsCapturedByBlock
806 << Use.getUser()->getSourceRange();
807 return;
808
809 case UninitUse::AfterDecl:
810 case UninitUse::AfterCall:
811 S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
812 << VD->getDeclName() << IsCapturedByBlock
813 << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
814 << const_cast<DeclContext*>(VD->getLexicalDeclContext())
815 << VD->getSourceRange();
816 S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
817 << IsCapturedByBlock << Use.getUser()->getSourceRange();
818 return;
819
820 case UninitUse::Maybe:
821 case UninitUse::Sometimes:
822 // Carry on to report sometimes-uninitialized branches, if possible,
823 // or a 'may be used uninitialized' diagnostic otherwise.
824 break;
825 }
826
827 // Diagnose each branch which leads to a sometimes-uninitialized use.
828 for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
829 I != E; ++I) {
830 assert(Use.getKind() == UninitUse::Sometimes);
831
832 const Expr *User = Use.getUser();
833 const Stmt *Term = I->Terminator;
834
835 // Information used when building the diagnostic.
836 unsigned DiagKind;
837 StringRef Str;
838 SourceRange Range;
839
840 // FixIts to suppress the diagnostic by removing the dead condition.
841 // For all binary terminators, branch 0 is taken if the condition is true,
842 // and branch 1 is taken if the condition is false.
843 int RemoveDiagKind = -1;
844 const char *FixitStr =
845 S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
846 : (I->Output ? "1" : "0");
847 FixItHint Fixit1, Fixit2;
848
849 switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
850 default:
851 // Don't know how to report this. Just fall back to 'may be used
852 // uninitialized'. FIXME: Can this happen?
853 continue;
854
855 // "condition is true / condition is false".
856 case Stmt::IfStmtClass: {
857 const IfStmt *IS = cast<IfStmt>(Val: Term);
858 DiagKind = 0;
859 Str = "if";
860 Range = IS->getCond()->getSourceRange();
861 RemoveDiagKind = 0;
862 CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
863 I->Output, Fixit1, Fixit2);
864 break;
865 }
866 case Stmt::ConditionalOperatorClass: {
867 const ConditionalOperator *CO = cast<ConditionalOperator>(Val: Term);
868 DiagKind = 0;
869 Str = "?:";
870 Range = CO->getCond()->getSourceRange();
871 RemoveDiagKind = 0;
872 CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
873 I->Output, Fixit1, Fixit2);
874 break;
875 }
876 case Stmt::BinaryOperatorClass: {
877 const BinaryOperator *BO = cast<BinaryOperator>(Val: Term);
878 if (!BO->isLogicalOp())
879 continue;
880 DiagKind = 0;
881 Str = BO->getOpcodeStr();
882 Range = BO->getLHS()->getSourceRange();
883 RemoveDiagKind = 0;
884 if ((BO->getOpcode() == BO_LAnd && I->Output) ||
885 (BO->getOpcode() == BO_LOr && !I->Output))
886 // true && y -> y, false || y -> y.
887 Fixit1 = FixItHint::CreateRemoval(
888 RemoveRange: SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
889 else
890 // false && y -> false, true || y -> true.
891 Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
892 break;
893 }
894
895 // "loop is entered / loop is exited".
896 case Stmt::WhileStmtClass:
897 DiagKind = 1;
898 Str = "while";
899 Range = cast<WhileStmt>(Val: Term)->getCond()->getSourceRange();
900 RemoveDiagKind = 1;
901 Fixit1 = FixItHint::CreateReplacement(RemoveRange: Range, Code: FixitStr);
902 break;
903 case Stmt::ForStmtClass:
904 DiagKind = 1;
905 Str = "for";
906 Range = cast<ForStmt>(Val: Term)->getCond()->getSourceRange();
907 RemoveDiagKind = 1;
908 if (I->Output)
909 Fixit1 = FixItHint::CreateRemoval(RemoveRange: Range);
910 else
911 Fixit1 = FixItHint::CreateReplacement(RemoveRange: Range, Code: FixitStr);
912 break;
913 case Stmt::CXXForRangeStmtClass:
914 if (I->Output == 1) {
915 // The use occurs if a range-based for loop's body never executes.
916 // That may be impossible, and there's no syntactic fix for this,
917 // so treat it as a 'may be uninitialized' case.
918 continue;
919 }
920 DiagKind = 1;
921 Str = "for";
922 Range = cast<CXXForRangeStmt>(Val: Term)->getRangeInit()->getSourceRange();
923 break;
924
925 // "condition is true / loop is exited".
926 case Stmt::DoStmtClass:
927 DiagKind = 2;
928 Str = "do";
929 Range = cast<DoStmt>(Val: Term)->getCond()->getSourceRange();
930 RemoveDiagKind = 1;
931 Fixit1 = FixItHint::CreateReplacement(RemoveRange: Range, Code: FixitStr);
932 break;
933
934 // "switch case is taken".
935 case Stmt::CaseStmtClass:
936 DiagKind = 3;
937 Str = "case";
938 Range = cast<CaseStmt>(Val: Term)->getLHS()->getSourceRange();
939 break;
940 case Stmt::DefaultStmtClass:
941 DiagKind = 3;
942 Str = "default";
943 Range = cast<DefaultStmt>(Val: Term)->getDefaultLoc();
944 break;
945 }
946
947 S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
948 << VD->getDeclName() << IsCapturedByBlock << DiagKind
949 << Str << I->Output << Range;
950 S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
951 << IsCapturedByBlock << User->getSourceRange();
952 if (RemoveDiagKind != -1)
953 S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
954 << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;
955
956 Diagnosed = true;
957 }
958
959 if (!Diagnosed)
960 S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
961 << VD->getDeclName() << IsCapturedByBlock
962 << Use.getUser()->getSourceRange();
963}
964
965/// Diagnose uninitialized const reference usages.
966static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
967 const UninitUse &Use) {
968 S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_const_reference)
969 << VD->getDeclName() << Use.getUser()->getSourceRange();
970 return true;
971}
972
973/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
974/// uninitialized variable. This manages the different forms of diagnostic
975/// emitted for particular types of uses. Returns true if the use was diagnosed
976/// as a warning. If a particular use is one we omit warnings for, returns
977/// false.
978static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
979 const UninitUse &Use,
980 bool alwaysReportSelfInit = false) {
981 if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Val: Use.getUser())) {
982 // Inspect the initializer of the variable declaration which is
983 // being referenced prior to its initialization. We emit
984 // specialized diagnostics for self-initialization, and we
985 // specifically avoid warning about self references which take the
986 // form of:
987 //
988 // int x = x;
989 //
990 // This is used to indicate to GCC that 'x' is intentionally left
991 // uninitialized. Proven code paths which access 'x' in
992 // an uninitialized state after this will still warn.
993 if (const Expr *Initializer = VD->getInit()) {
994 if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
995 return false;
996
997 ContainsReference CR(S.Context, DRE);
998 CR.Visit(Initializer);
999 if (CR.doesContainReference()) {
1000 S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
1001 << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
1002 return true;
1003 }
1004 }
1005
1006 DiagUninitUse(S, VD, Use, IsCapturedByBlock: false);
1007 } else {
1008 const BlockExpr *BE = cast<BlockExpr>(Val: Use.getUser());
1009 if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
1010 S.Diag(BE->getBeginLoc(),
1011 diag::warn_uninit_byref_blockvar_captured_by_block)
1012 << VD->getDeclName()
1013 << VD->getType().getQualifiers().hasObjCLifetime();
1014 else
1015 DiagUninitUse(S, VD, Use, IsCapturedByBlock: true);
1016 }
1017
1018 // Report where the variable was declared when the use wasn't within
1019 // the initializer of that declaration & we didn't already suggest
1020 // an initialization fixit.
1021 if (!SuggestInitializationFixit(S, VD))
1022 S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
1023 << VD->getDeclName();
1024
1025 return true;
1026}
1027
namespace {
/// AST visitor that matches [[fallthrough]]-style annotations against the
/// switch-case fall-through edges they are supposed to precede.
///
/// After traversal, annotations that were found to legitimately annotate a
/// fall-through are erased from the set via markFallthroughVisited(); the
/// caller treats any annotation still left in getFallthroughStmts() as
/// misplaced.
class FallthroughMapper : public DynamicRecursiveASTVisitor {
public:
  FallthroughMapper(Sema &S) : FoundSwitchStatements(false), S(S) {
    // Type traversal is irrelevant to fall-through checking; skip it.
    ShouldWalkTypesOfTypeLocs = false;
  }

  /// True if the traversal saw at least one switch statement.
  bool foundSwitchStatements() const { return FoundSwitchStatements; }

  /// Mark \p Stmt as annotating a real fall-through edge. It must have been
  /// collected by VisitAttributedStmt() during traversal.
  void markFallthroughVisited(const AttributedStmt *Stmt) {
    bool Found = FallthroughStmts.erase(Ptr: Stmt);
    assert(Found);
    (void)Found;
  }

  typedef llvm::SmallPtrSet<const AttributedStmt *, 8> AttrStmts;

  /// The fall-through annotations not (yet) matched to a fall-through edge.
  const AttrStmts &getFallthroughStmts() const { return FallthroughStmts; }

  /// Compute the set of CFG blocks reachable from the entry (breadth-first),
  /// caching the result in ReachableBlocks for checkFallThroughIntoBlock().
  void fillReachableBlocks(CFG *Cfg) {
    assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
    std::deque<const CFGBlock *> BlockQueue;

    ReachableBlocks.insert(Ptr: &Cfg->getEntry());
    BlockQueue.push_back(x: &Cfg->getEntry());
    // Mark all case blocks reachable to avoid problems with switching on
    // constants, covered enums, etc.
    // These blocks can contain fall-through annotations, and we don't want to
    // issue a warn_fallthrough_attr_unreachable for them.
    for (const auto *B : *Cfg) {
      const Stmt *L = B->getLabel();
      if (isa_and_nonnull<SwitchCase>(Val: L) && ReachableBlocks.insert(Ptr: B).second)
        BlockQueue.push_back(x: B);
    }

    // Standard BFS over the successor edges.
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      for (const CFGBlock *B : P->succs()) {
        if (B && ReachableBlocks.insert(Ptr: B).second)
          BlockQueue.push_back(x: B);
      }
    }
  }

  /// Inspect every predecessor of case-labelled block \p B and classify each
  /// incoming edge as an annotated fall-through (counted in \p AnnotatedCnt),
  /// an unannotated fall-through, or a benign edge (switch dispatch, empty
  /// preceding label, etc.). Returns true if at least one unannotated
  /// fall-through into \p B was found.
  bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
                                 bool IsTemplateInstantiation) {
    assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

    int UnannotatedCnt = 0;
    AnnotatedCnt = 0;

    // Worklist of predecessor blocks still to classify.
    std::deque<const CFGBlock *> BlockQueue(B.pred_begin(), B.pred_end());
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      if (!P)
        continue;

      const Stmt *Term = P->getTerminatorStmt();
      if (isa_and_nonnull<SwitchStmt>(Val: Term))
        continue; // Switch statement, good.

      const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(Val: P->getLabel());
      if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Previous case label has no statements, good.

      const LabelStmt *L = dyn_cast_or_null<LabelStmt>(Val: P->getLabel());
      if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Case label is preceded with a normal label, good.

      if (!ReachableBlocks.count(Ptr: P)) {
        // The predecessor is unreachable: look for a fall-through annotation
        // buried in its (dead) statements so we can still consume it.
        for (const CFGElement &Elem : llvm::reverse(C: *P)) {
          if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>()) {
            if (const AttributedStmt *AS = asFallThroughAttr(S: CS->getStmt())) {
              // Don't issue a warning for an unreachable fallthrough
              // attribute in template instantiations as it may not be
              // unreachable in all instantiations of the template.
              if (!IsTemplateInstantiation)
                S.Diag(AS->getBeginLoc(),
                       diag::warn_unreachable_fallthrough_attr);
              markFallthroughVisited(Stmt: AS);
              ++AnnotatedCnt;
              break;
            }
            // Don't care about other unreachable statements.
          }
        }
        // If there are no unreachable statements, this may be a special
        // case in CFG:
        //   case X: {
        //     A a;  // A has a destructor.
        //     break;
        //   }
        //   // <<<< This place is represented by a 'hanging' CFG block.
        //   case Y:
        continue;
      }

      const Stmt *LastStmt = getLastStmt(B: *P);
      if (const AttributedStmt *AS = asFallThroughAttr(S: LastStmt)) {
        markFallthroughVisited(Stmt: AS);
        ++AnnotatedCnt;
        continue; // Fallthrough annotation, good.
      }

      if (!LastStmt) { // This block contains no executable statements.
        // Traverse its predecessors.
        std::copy(first: P->pred_begin(), last: P->pred_end(),
                  result: std::back_inserter(x&: BlockQueue));
        continue;
      }

      // Reachable predecessor with real statements and no annotation:
      // an unannotated fall-through.
      ++UnannotatedCnt;
    }
    return !!UnannotatedCnt;
  }

  /// Collect every fall-through annotation encountered during traversal.
  bool VisitAttributedStmt(AttributedStmt *S) override {
    if (asFallThroughAttr(S))
      FallthroughStmts.insert(Ptr: S);
    return true;
  }

  bool VisitSwitchStmt(SwitchStmt *S) override {
    FoundSwitchStatements = true;
    return true;
  }

  // We don't want to traverse local type declarations. We analyze their
  // methods separately.
  bool TraverseDecl(Decl *D) override { return true; }

  // We analyze lambda bodies separately. Skip them here.
  bool TraverseLambdaExpr(LambdaExpr *LE) override {
    // Traverse the captures, but not the body.
    for (const auto C : zip(t: LE->captures(), u: LE->capture_inits()))
      TraverseLambdaCapture(LE, &std::get<0>(t: C), std::get<1>(t: C));
    return true;
  }

 private:

  /// Returns \p S as an AttributedStmt carrying a FallThroughAttr, or null.
  static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
    if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(Val: S)) {
      if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
        return AS;
    }
    return nullptr;
  }

  /// Returns the last statement of block \p B: its terminator if present,
  /// otherwise the last CFGStmt element, otherwise (workaround, see below)
  /// the substatement of its case label.
  static const Stmt *getLastStmt(const CFGBlock &B) {
    if (const Stmt *Term = B.getTerminatorStmt())
      return Term;
    for (const CFGElement &Elem : llvm::reverse(C: B))
      if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>())
        return CS->getStmt();
    // Workaround to detect a statement thrown out by CFGBuilder:
    //   case X: {}  case Y:
    //   case X: ;   case Y:
    if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(Val: B.getLabel()))
      if (!isa<SwitchCase>(Val: SW->getSubStmt()))
        return SW->getSubStmt();

    return nullptr;
  }

  bool FoundSwitchStatements;        // Saw any switch during traversal.
  AttrStmts FallthroughStmts;        // Annotations not yet matched.
  Sema &S;
  llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
};
} // anonymous namespace
1201
1202static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
1203 SourceLocation Loc) {
1204 TokenValue FallthroughTokens[] = {
1205 tok::l_square, tok::l_square,
1206 PP.getIdentifierInfo(Name: "fallthrough"),
1207 tok::r_square, tok::r_square
1208 };
1209
1210 TokenValue ClangFallthroughTokens[] = {
1211 tok::l_square, tok::l_square, PP.getIdentifierInfo(Name: "clang"),
1212 tok::coloncolon, PP.getIdentifierInfo(Name: "fallthrough"),
1213 tok::r_square, tok::r_square
1214 };
1215
1216 bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C23;
1217
1218 StringRef MacroName;
1219 if (PreferClangAttr)
1220 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: ClangFallthroughTokens);
1221 if (MacroName.empty())
1222 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: FallthroughTokens);
1223 if (MacroName.empty() && !PreferClangAttr)
1224 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: ClangFallthroughTokens);
1225 if (MacroName.empty()) {
1226 if (!PreferClangAttr)
1227 MacroName = "[[fallthrough]]";
1228 else if (PP.getLangOpts().CPlusPlus)
1229 MacroName = "[[clang::fallthrough]]";
1230 else
1231 MacroName = "__attribute__((fallthrough))";
1232 }
1233 return MacroName;
1234}
1235
/// Walk the function body in \p AC and diagnose unannotated fall-through
/// between switch labels, suggesting fixits, plus misplaced fall-through
/// annotations. When \p PerFunction is set, warnings are only produced for
/// functions that already contain at least one fall-through annotation.
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  // Collect switch statements and fall-through annotations.
  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  if (!FM.foundSwitchStatements())
    return;

  // Per-function mode only fires when the function opted in by using the
  // annotation somewhere.
  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (const CFGBlock *B : llvm::reverse(C&: *Cfg)) {
    const Stmt *Label = B->getLabel();

    // Only blocks that start with a case/default label can be fallen into.
    if (!isa_and_nonnull<SwitchCase>(Val: Label))
      continue;

    int AnnotatedCnt;

    bool IsTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Val: AC.getDecl()))
      IsTemplateInstantiation = Function->isTemplateInstantiation();
    if (!FM.checkFallThroughIntoBlock(B: *B, AnnotatedCnt,
                                      IsTemplateInstantiation))
      continue;

    S.Diag(Label->getBeginLoc(),
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
                       : diag::warn_unannotated_fallthrough);

    if (!AnnotatedCnt) {
      SourceLocation L = Label->getBeginLoc();
      // Fixits can't be inserted reliably into macro expansions.
      if (L.isMacroID())
        continue;

      const Stmt *Term = B->getTerminatorStmt();
      // Skip empty cases.
      while (B->empty() && !Term && B->succ_size() == 1) {
        B = *B->succ_begin();
        Term = B->getTerminatorStmt();
      }
      // Suggest inserting an annotation unless the case is effectively empty
      // and ends in a break (where a fall-through annotation makes no sense).
      if (!(B->empty() && isa_and_nonnull<BreakStmt>(Val: Term))) {
        Preprocessor &PP = S.getPreprocessor();
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, Loc: L);
        SmallString<64> TextToInsert(AnnotationSpelling);
        TextToInsert += "; ";
        S.Diag(L, diag::note_insert_fallthrough_fixit)
            << AnnotationSpelling
            << FixItHint::CreateInsertion(L, TextToInsert);
      }
      S.Diag(L, diag::note_insert_break_fixit)
          << FixItHint::CreateInsertion(L, "break; ");
    }
  }

  // Annotations never matched against a fall-through edge are misplaced.
  for (const auto *F : FM.getFallthroughStmts())
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
}
1301
1302static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
1303 const Stmt *S) {
1304 assert(S);
1305
1306 do {
1307 switch (S->getStmtClass()) {
1308 case Stmt::ForStmtClass:
1309 case Stmt::WhileStmtClass:
1310 case Stmt::CXXForRangeStmtClass:
1311 case Stmt::ObjCForCollectionStmtClass:
1312 return true;
1313 case Stmt::DoStmtClass: {
1314 Expr::EvalResult Result;
1315 if (!cast<DoStmt>(Val: S)->getCond()->EvaluateAsInt(Result, Ctx))
1316 return true;
1317 return Result.Val.getInt().getBoolValue();
1318 }
1319 default:
1320 break;
1321 }
1322 } while ((S = PM.getParent(S)));
1323
1324 return false;
1325}
1326
/// Emit -Warc-repeated-use-of-weak diagnostics for \p CurFn: warn when a
/// weak object (weak variable, property, implicit property, or ivar) is read
/// more than once, since each read may observe a different value if the
/// referent is deallocated between reads. \p D is the declaration whose body
/// is being analyzed (used to classify the code body for warning text) and
/// \p PM maps statements to their parents for loop detection.
static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
      StmtUsesPair;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      WeakUseVector::const_iterator UI2 = UI;
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        // Exactly one read: only warn if it repeats by virtue of a loop.
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        // Don't warn about a single read in a loop when the base is local.
        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Val: Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Val: Base))
            continue;
      }
    }

    UsesByStmt.push_back(Elt: StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  SourceManager &SM = S.getSourceManager();
  llvm::sort(C&: UsesByStmt,
             Comp: [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
               return SM.isBeforeInTranslationUnit(LHS: LHS.first->getBeginLoc(),
                                                   RHS: RHS.first->getBeginLoc());
             });

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(Val: CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(Val: CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(Val: D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (const auto &P : UsesByStmt) {
    const Stmt *FirstRead = P.first;
    const WeakObjectProfileTy &Key = P.second->first;
    const WeakUseVector &Uses = P.second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    const NamedDecl *KeyProp = Key.getProperty();
    if (isa<VarDecl>(Val: KeyProp))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(Val: KeyProp))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(Val: KeyProp))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(Val: KeyProp))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Do not warn about IBOutlet weak property receivers being set to null
    // since they are typically only used from the main thread.
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(Val: KeyProp))
      if (Prop->hasAttr<IBOutletAttr>())
        continue;

    // Show the first time the object was read.
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
        << int(ObjectKind) << KeyProp << int(FunctionKind)
        << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
    for (const auto &Use : Uses) {
      if (Use.getUseExpr() == FirstRead)
        continue;
      S.Diag(Use.getUseExpr()->getBeginLoc(),
             diag::note_arc_weak_also_accessed_here)
          << Use.getUseExpr()->getSourceRange();
    }
  }
}
1482
1483namespace clang {
1484namespace {
1485typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1486typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1487typedef std::list<DelayedDiag> DiagList;
1488
1489struct SortDiagBySourceLocation {
1490 SourceManager &SM;
1491 SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1492
1493 bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1494 // Although this call will be slow, this is only called when outputting
1495 // multiple warnings.
1496 return SM.isBeforeInTranslationUnit(LHS: left.first.first, RHS: right.first.first);
1497 }
1498};
1499} // anonymous namespace
1500} // namespace clang
1501
1502namespace {
/// Collects uninitialized-variable uses reported by the dataflow analysis
/// and emits them as diagnostics in a deterministic order when flushed
/// (at the latest, on destruction).
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef SmallVector<UninitUse, 2> UsesVec;
  // Pointer to the heap-allocated uses vector, plus a flag recording whether
  // the variable had an idiomatic self-init ('int x = x;').
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
  // Prefer using MapVector to DenseMap, so that iteration order will be
  // the same as insertion order. This is needed to obtain a deterministic
  // order of diagnostics when calling flushDiagnostics().
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
  UsesMap uses;
  UsesMap constRefUses;

public:
  UninitValsDiagReporter(Sema &S) : S(S) {}
  // Flushing from the destructor guarantees diagnostics are emitted even if
  // the client never calls flushDiagnostics() explicitly.
  ~UninitValsDiagReporter() override { flushDiagnostics(); }

  /// Return the entry for \p vd in \p um, lazily allocating its uses vector.
  MappedType &getUses(UsesMap &um, const VarDecl *vd) {
    MappedType &V = um[vd];
    if (!V.getPointer())
      V.setPointer(new UsesVec());
    return V;
  }

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    getUses(um&: uses, vd).getPointer()->push_back(Elt: use);
  }

  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
                                         const UninitUse &use) override {
    getUses(um&: constRefUses, vd).getPointer()->push_back(Elt: use);
  }

  void handleSelfInit(const VarDecl *vd) override {
    // Record the self-init flag in both maps; it downgrades/redirects any
    // later use diagnostics for this variable.
    getUses(um&: uses, vd).setInt(true);
    getUses(um&: constRefUses, vd).setInt(true);
  }

  /// Emit all buffered diagnostics and release the per-variable use vectors.
  void flushDiagnostics() {
    for (const auto &P : uses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      // Specially handle the case where we have uses of an uninitialized
      // variable, but the root cause is an idiomatic self-init. We want
      // to report the diagnostic at the self-init since that is the root cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, VD: vd,
                                 Use: UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        // Sort the uses by their SourceLocations. While not strictly
        // guaranteed to produce them in line/column order, this will provide
        // a stable ordering.
        llvm::sort(C&: *vec, Comp: [](const UninitUse &a, const UninitUse &b) {
          // Prefer a more confident report over a less confident one.
          if (a.getKind() != b.getKind())
            return a.getKind() > b.getKind();
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
        });

        for (const auto &U : *vec) {
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
          UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;

          if (DiagnoseUninitializedUse(S, VD: vd, Use))
            // Skip further diagnostics for this variable. We try to warn only
            // on the first point at which a variable is used uninitialized.
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    uses.clear();

    // Flush all const reference uses diags.
    for (const auto &P : constRefUses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      // Same self-init special case as above: report at the root cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, VD: vd,
                                 Use: UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        for (const auto &U : *vec) {
          if (DiagnoseUninitializedConstRefUse(S, VD: vd, Use: U))
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    constRefUses.clear();
  }

private:
  /// True if any recorded use is definitely (not just possibly) of an
  /// uninitialized value.
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
    return llvm::any_of(Range: *vec, P: [](const UninitUse &U) {
      return U.getKind() == UninitUse::Always ||
             U.getKind() == UninitUse::AfterCall ||
             U.getKind() == UninitUse::AfterDecl;
    });
  }
};
1620
1621/// Inter-procedural data for the called-once checker.
1622class CalledOnceInterProceduralData {
1623public:
1624 // Add the delayed warning for the given block.
1625 void addDelayedWarning(const BlockDecl *Block,
1626 PartialDiagnosticAt &&Warning) {
1627 DelayedBlockWarnings[Block].emplace_back(Args: std::move(Warning));
1628 }
1629 // Report all of the warnings we've gathered for the given block.
1630 void flushWarnings(const BlockDecl *Block, Sema &S) {
1631 for (const PartialDiagnosticAt &Delayed : DelayedBlockWarnings[Block])
1632 S.Diag(Delayed.first, Delayed.second);
1633
1634 discardWarnings(Block);
1635 }
1636 // Discard all of the warnings we've gathered for the given block.
1637 void discardWarnings(const BlockDecl *Block) {
1638 DelayedBlockWarnings.erase(Val: Block);
1639 }
1640
1641private:
1642 using DelayedDiagnostics = SmallVector<PartialDiagnosticAt, 2>;
1643 llvm::DenseMap<const BlockDecl *, DelayedDiagnostics> DelayedBlockWarnings;
1644};
1645
1646class CalledOnceCheckReporter : public CalledOnceCheckHandler {
1647public:
1648 CalledOnceCheckReporter(Sema &S, CalledOnceInterProceduralData &Data)
1649 : S(S), Data(Data) {}
1650 void handleDoubleCall(const ParmVarDecl *Parameter, const Expr *Call,
1651 const Expr *PrevCall, bool IsCompletionHandler,
1652 bool Poised) override {
1653 auto DiagToReport = IsCompletionHandler
1654 ? diag::warn_completion_handler_called_twice
1655 : diag::warn_called_once_gets_called_twice;
1656 S.Diag(Call->getBeginLoc(), DiagToReport) << Parameter;
1657 S.Diag(PrevCall->getBeginLoc(), diag::note_called_once_gets_called_twice)
1658 << Poised;
1659 }
1660
1661 void handleNeverCalled(const ParmVarDecl *Parameter,
1662 bool IsCompletionHandler) override {
1663 auto DiagToReport = IsCompletionHandler
1664 ? diag::warn_completion_handler_never_called
1665 : diag::warn_called_once_never_called;
1666 S.Diag(Parameter->getBeginLoc(), DiagToReport)
1667 << Parameter << /* Captured */ false;
1668 }
1669
1670 void handleNeverCalled(const ParmVarDecl *Parameter, const Decl *Function,
1671 const Stmt *Where, NeverCalledReason Reason,
1672 bool IsCalledDirectly,
1673 bool IsCompletionHandler) override {
1674 auto DiagToReport = IsCompletionHandler
1675 ? diag::warn_completion_handler_never_called_when
1676 : diag::warn_called_once_never_called_when;
1677 PartialDiagnosticAt Warning(Where->getBeginLoc(), S.PDiag(DiagID: DiagToReport)
1678 << Parameter
1679 << IsCalledDirectly
1680 << (unsigned)Reason);
1681
1682 if (const auto *Block = dyn_cast<BlockDecl>(Val: Function)) {
1683 // We shouldn't report these warnings on blocks immediately
1684 Data.addDelayedWarning(Block, Warning: std::move(Warning));
1685 } else {
1686 S.Diag(Warning.first, Warning.second);
1687 }
1688 }
1689
1690 void handleCapturedNeverCalled(const ParmVarDecl *Parameter,
1691 const Decl *Where,
1692 bool IsCompletionHandler) override {
1693 auto DiagToReport = IsCompletionHandler
1694 ? diag::warn_completion_handler_never_called
1695 : diag::warn_called_once_never_called;
1696 S.Diag(Where->getBeginLoc(), DiagToReport)
1697 << Parameter << /* Captured */ true;
1698 }
1699
1700 void
1701 handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl *Block) override {
1702 Data.flushWarnings(Block, S);
1703 }
1704
1705 void handleBlockWithNoGuarantees(const BlockDecl *Block) override {
1706 Data.discardWarnings(Block);
1707 }
1708
1709private:
1710 Sema &S;
1711 CalledOnceInterProceduralData &Data;
1712};
1713
1714constexpr unsigned CalledOnceWarnings[] = {
1715 diag::warn_called_once_never_called,
1716 diag::warn_called_once_never_called_when,
1717 diag::warn_called_once_gets_called_twice};
1718
1719constexpr unsigned CompletionHandlerWarnings[]{
1720 diag::warn_completion_handler_never_called,
1721 diag::warn_completion_handler_never_called_when,
1722 diag::warn_completion_handler_called_twice};
1723
1724bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,
1725 const DiagnosticsEngine &Diags,
1726 SourceLocation At) {
1727 return llvm::any_of(Range&: DiagIDs, P: [&Diags, At](unsigned DiagID) {
1728 return !Diags.isIgnored(DiagID, Loc: At);
1729 });
1730}
1731
1732bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine &Diags,
1733 SourceLocation At) {
1734 return shouldAnalyzeCalledOnceImpl(CompletionHandlerWarnings, Diags, At);
1735}
1736
1737bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine &Diags,
1738 SourceLocation At) {
1739 return shouldAnalyzeCalledOnceImpl(CalledOnceWarnings, Diags, At) ||
1740 shouldAnalyzeCalledOnceConventions(Diags, At);
1741}
1742} // anonymous namespace
1743
1744//===----------------------------------------------------------------------===//
1745// -Wthread-safety
1746//===----------------------------------------------------------------------===//
1747namespace clang {
1748namespace threadSafety {
1749namespace {
1750class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
1751 Sema &S;
1752 DiagList Warnings;
1753 SourceLocation FunLocation, FunEndLocation;
1754
1755 const FunctionDecl *CurrentFunction;
1756 bool Verbose;
1757
1758 OptionalNotes getNotes() const {
1759 if (Verbose && CurrentFunction) {
1760 PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1761 S.PDiag(diag::note_thread_warning_in_fun)
1762 << CurrentFunction);
1763 return OptionalNotes(1, FNote);
1764 }
1765 return OptionalNotes();
1766 }
1767
1768 OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
1769 OptionalNotes ONS(1, Note);
1770 if (Verbose && CurrentFunction) {
1771 PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1772 S.PDiag(diag::note_thread_warning_in_fun)
1773 << CurrentFunction);
1774 ONS.push_back(Elt: std::move(FNote));
1775 }
1776 return ONS;
1777 }
1778
1779 OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
1780 const PartialDiagnosticAt &Note2) const {
1781 OptionalNotes ONS;
1782 ONS.push_back(Elt: Note1);
1783 ONS.push_back(Elt: Note2);
1784 if (Verbose && CurrentFunction) {
1785 PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
1786 S.PDiag(diag::note_thread_warning_in_fun)
1787 << CurrentFunction);
1788 ONS.push_back(Elt: std::move(FNote));
1789 }
1790 return ONS;
1791 }
1792
1793 OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
1794 return LocLocked.isValid()
1795 ? getNotes(PartialDiagnosticAt(
1796 LocLocked, S.PDiag(diag::note_locked_here) << Kind))
1797 : getNotes();
1798 }
1799
1800 OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
1801 StringRef Kind) {
1802 return LocUnlocked.isValid()
1803 ? getNotes(PartialDiagnosticAt(
1804 LocUnlocked, S.PDiag(diag::note_unlocked_here) << Kind))
1805 : getNotes();
1806 }
1807
1808 OptionalNotes makeManagedMismatchNoteForParam(SourceLocation DeclLoc) {
1809 return DeclLoc.isValid()
1810 ? getNotes(PartialDiagnosticAt(
1811 DeclLoc,
1812 S.PDiag(diag::note_managed_mismatch_here_for_param)))
1813 : getNotes();
1814 }
1815
1816 public:
  // Diagnostics are buffered in Warnings until emitDiagnostics() is called.
  // FL/FEL supply fallback locations (function begin/end) for reports that
  // lack a precise source position.
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
    : S(S), FunLocation(FL), FunEndLocation(FEL),
      CurrentFunction(nullptr), Verbose(false) {}
1820
  /// Enables/disables the extra "in function 'F'" notes on each report.
  void setVerbose(bool b) { Verbose = b; }
1822
1823 /// Emit all buffered diagnostics in order of sourcelocation.
1824 /// We need to output diagnostics produced while iterating through
1825 /// the lockset in deterministic order, so this function orders diagnostics
1826 /// and outputs them.
1827 void emitDiagnostics() {
1828 Warnings.sort(comp: SortDiagBySourceLocation(S.getSourceManager()));
1829 for (const auto &Diag : Warnings) {
1830 S.Diag(Diag.first.first, Diag.first.second);
1831 for (const auto &Note : Diag.second)
1832 S.Diag(Note.first, Note.second);
1833 }
1834 }
1835
1836 void handleUnmatchedUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc,
1837 Name scopeName, StringRef Kind,
1838 Name expected, Name actual) override {
1839 PartialDiagnosticAt Warning(Loc,
1840 S.PDiag(diag::warn_unmatched_underlying_mutexes)
1841 << Kind << scopeName << expected << actual);
1842 Warnings.emplace_back(args: std::move(Warning),
1843 args: makeManagedMismatchNoteForParam(DeclLoc: DLoc));
1844 }
1845
1846 void handleExpectMoreUnderlyingMutexes(SourceLocation Loc,
1847 SourceLocation DLoc, Name scopeName,
1848 StringRef Kind,
1849 Name expected) override {
1850 PartialDiagnosticAt Warning(
1851 Loc, S.PDiag(diag::warn_expect_more_underlying_mutexes)
1852 << Kind << scopeName << expected);
1853 Warnings.emplace_back(args: std::move(Warning),
1854 args: makeManagedMismatchNoteForParam(DeclLoc: DLoc));
1855 }
1856
1857 void handleExpectFewerUnderlyingMutexes(SourceLocation Loc,
1858 SourceLocation DLoc, Name scopeName,
1859 StringRef Kind,
1860 Name actual) override {
1861 PartialDiagnosticAt Warning(
1862 Loc, S.PDiag(diag::warn_expect_fewer_underlying_mutexes)
1863 << Kind << scopeName << actual);
1864 Warnings.emplace_back(args: std::move(Warning),
1865 args: makeManagedMismatchNoteForParam(DeclLoc: DLoc));
1866 }
1867
1868 void handleInvalidLockExp(SourceLocation Loc) override {
1869 PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
1870 << Loc);
1871 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
1872 }
1873
1874 void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
1875 SourceLocation LocPreviousUnlock) override {
1876 if (Loc.isInvalid())
1877 Loc = FunLocation;
1878 PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
1879 << Kind << LockName);
1880 Warnings.emplace_back(args: std::move(Warning),
1881 args: makeUnlockedHereNote(LocUnlocked: LocPreviousUnlock, Kind));
1882 }
1883
1884 void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
1885 LockKind Expected, LockKind Received,
1886 SourceLocation LocLocked,
1887 SourceLocation LocUnlock) override {
1888 if (LocUnlock.isInvalid())
1889 LocUnlock = FunLocation;
1890 PartialDiagnosticAt Warning(
1891 LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
1892 << Kind << LockName << Received << Expected);
1893 Warnings.emplace_back(args: std::move(Warning),
1894 args: makeLockedHereNote(LocLocked, Kind));
1895 }
1896
1897 void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
1898 SourceLocation LocDoubleLock) override {
1899 if (LocDoubleLock.isInvalid())
1900 LocDoubleLock = FunLocation;
1901 PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
1902 << Kind << LockName);
1903 Warnings.emplace_back(args: std::move(Warning),
1904 args: makeLockedHereNote(LocLocked, Kind));
1905 }
1906
1907 void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
1908 SourceLocation LocLocked,
1909 SourceLocation LocEndOfScope,
1910 LockErrorKind LEK,
1911 bool ReentrancyMismatch) override {
1912 unsigned DiagID = 0;
1913 switch (LEK) {
1914 case LEK_LockedSomePredecessors:
1915 DiagID = diag::warn_lock_some_predecessors;
1916 break;
1917 case LEK_LockedSomeLoopIterations:
1918 DiagID = diag::warn_expecting_lock_held_on_loop;
1919 break;
1920 case LEK_LockedAtEndOfFunction:
1921 DiagID = diag::warn_no_unlock;
1922 break;
1923 case LEK_NotLockedAtEndOfFunction:
1924 DiagID = diag::warn_expecting_locked;
1925 break;
1926 }
1927 if (LocEndOfScope.isInvalid())
1928 LocEndOfScope = FunEndLocation;
1929
1930 PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID)
1931 << Kind << LockName
1932 << ReentrancyMismatch);
1933 Warnings.emplace_back(args: std::move(Warning),
1934 args: makeLockedHereNote(LocLocked, Kind));
1935 }
1936
1937 void handleExclusiveAndShared(StringRef Kind, Name LockName,
1938 SourceLocation Loc1,
1939 SourceLocation Loc2) override {
1940 PartialDiagnosticAt Warning(Loc1,
1941 S.PDiag(diag::warn_lock_exclusive_and_shared)
1942 << Kind << LockName);
1943 PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
1944 << Kind << LockName);
1945 Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note));
1946 }
1947
1948 void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
1949 AccessKind AK, SourceLocation Loc) override {
1950 assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
1951 "Only works for variables");
1952 unsigned DiagID = POK == POK_VarAccess?
1953 diag::warn_variable_requires_any_lock:
1954 diag::warn_var_deref_requires_any_lock;
1955 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1956 << D << getLockKindFromAccessKind(AK));
1957 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
1958 }
1959
1960 void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
1961 ProtectedOperationKind POK, Name LockName,
1962 LockKind LK, SourceLocation Loc,
1963 Name *PossibleMatch) override {
1964 unsigned DiagID = 0;
1965 if (PossibleMatch) {
1966 switch (POK) {
1967 case POK_VarAccess:
1968 DiagID = diag::warn_variable_requires_lock_precise;
1969 break;
1970 case POK_VarDereference:
1971 DiagID = diag::warn_var_deref_requires_lock_precise;
1972 break;
1973 case POK_FunctionCall:
1974 DiagID = diag::warn_fun_requires_lock_precise;
1975 break;
1976 case POK_PassByRef:
1977 DiagID = diag::warn_guarded_pass_by_reference;
1978 break;
1979 case POK_PtPassByRef:
1980 DiagID = diag::warn_pt_guarded_pass_by_reference;
1981 break;
1982 case POK_ReturnByRef:
1983 DiagID = diag::warn_guarded_return_by_reference;
1984 break;
1985 case POK_PtReturnByRef:
1986 DiagID = diag::warn_pt_guarded_return_by_reference;
1987 break;
1988 case POK_PassPointer:
1989 DiagID = diag::warn_guarded_pass_pointer;
1990 break;
1991 case POK_PtPassPointer:
1992 DiagID = diag::warn_pt_guarded_pass_pointer;
1993 break;
1994 case POK_ReturnPointer:
1995 DiagID = diag::warn_guarded_return_pointer;
1996 break;
1997 case POK_PtReturnPointer:
1998 DiagID = diag::warn_pt_guarded_return_pointer;
1999 break;
2000 }
2001 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
2002 << D
2003 << LockName << LK);
2004 PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
2005 << *PossibleMatch);
2006 if (Verbose && POK == POK_VarAccess) {
2007 PartialDiagnosticAt VNote(D->getLocation(),
2008 S.PDiag(diag::note_guarded_by_declared_here)
2009 << D->getDeclName());
2010 Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note1: Note, Note2: VNote));
2011 } else
2012 Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note));
2013 } else {
2014 switch (POK) {
2015 case POK_VarAccess:
2016 DiagID = diag::warn_variable_requires_lock;
2017 break;
2018 case POK_VarDereference:
2019 DiagID = diag::warn_var_deref_requires_lock;
2020 break;
2021 case POK_FunctionCall:
2022 DiagID = diag::warn_fun_requires_lock;
2023 break;
2024 case POK_PassByRef:
2025 DiagID = diag::warn_guarded_pass_by_reference;
2026 break;
2027 case POK_PtPassByRef:
2028 DiagID = diag::warn_pt_guarded_pass_by_reference;
2029 break;
2030 case POK_ReturnByRef:
2031 DiagID = diag::warn_guarded_return_by_reference;
2032 break;
2033 case POK_PtReturnByRef:
2034 DiagID = diag::warn_pt_guarded_return_by_reference;
2035 break;
2036 case POK_PassPointer:
2037 DiagID = diag::warn_guarded_pass_pointer;
2038 break;
2039 case POK_PtPassPointer:
2040 DiagID = diag::warn_pt_guarded_pass_pointer;
2041 break;
2042 case POK_ReturnPointer:
2043 DiagID = diag::warn_guarded_return_pointer;
2044 break;
2045 case POK_PtReturnPointer:
2046 DiagID = diag::warn_pt_guarded_return_pointer;
2047 break;
2048 }
2049 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
2050 << D
2051 << LockName << LK);
2052 if (Verbose && POK == POK_VarAccess) {
2053 PartialDiagnosticAt Note(D->getLocation(),
2054 S.PDiag(diag::note_guarded_by_declared_here));
2055 Warnings.emplace_back(args: std::move(Warning), args: getNotes(Note));
2056 } else
2057 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2058 }
2059 }
2060
2061 void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
2062 SourceLocation Loc) override {
2063 PartialDiagnosticAt Warning(Loc,
2064 S.PDiag(diag::warn_acquire_requires_negative_cap)
2065 << Kind << LockName << Neg);
2066 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2067 }
2068
2069 void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
2070 SourceLocation Loc) override {
2071 PartialDiagnosticAt Warning(
2072 Loc, S.PDiag(diag::warn_fun_requires_negative_cap) << D << LockName);
2073 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2074 }
2075
2076 void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
2077 SourceLocation Loc) override {
2078 PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
2079 << Kind << FunName << LockName);
2080 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2081 }
2082
2083 void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
2084 SourceLocation Loc) override {
2085 PartialDiagnosticAt Warning(Loc,
2086 S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
2087 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2088 }
2089
2090 void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
2091 PartialDiagnosticAt Warning(Loc,
2092 S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
2093 Warnings.emplace_back(args: std::move(Warning), args: getNotes());
2094 }
2095
  /// Records the function currently being analyzed (used by verbose notes).
  void enterFunction(const FunctionDecl* FD) override {
    CurrentFunction = FD;
  }
2099
  /// Clears the current function when its analysis finishes.
  void leaveFunction(const FunctionDecl* FD) override {
    CurrentFunction = nullptr;
  }
2103};
2104} // anonymous namespace
2105} // namespace threadSafety
2106} // namespace clang
2107
2108//===----------------------------------------------------------------------===//
2109// -Wconsumed
2110//===----------------------------------------------------------------------===//
2111
2112namespace clang {
2113namespace consumed {
2114namespace {
2115class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
2116
2117 Sema &S;
2118 DiagList Warnings;
2119
2120public:
2121
2122 ConsumedWarningsHandler(Sema &S) : S(S) {}
2123
2124 void emitDiagnostics() override {
2125 Warnings.sort(comp: SortDiagBySourceLocation(S.getSourceManager()));
2126 for (const auto &Diag : Warnings) {
2127 S.Diag(Diag.first.first, Diag.first.second);
2128 for (const auto &Note : Diag.second)
2129 S.Diag(Note.first, Note.second);
2130 }
2131 }
2132
2133 void warnLoopStateMismatch(SourceLocation Loc,
2134 StringRef VariableName) override {
2135 PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
2136 VariableName);
2137
2138 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2139 }
2140
2141 void warnParamReturnTypestateMismatch(SourceLocation Loc,
2142 StringRef VariableName,
2143 StringRef ExpectedState,
2144 StringRef ObservedState) override {
2145
2146 PartialDiagnosticAt Warning(Loc, S.PDiag(
2147 diag::warn_param_return_typestate_mismatch) << VariableName <<
2148 ExpectedState << ObservedState);
2149
2150 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2151 }
2152
2153 void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2154 StringRef ObservedState) override {
2155
2156 PartialDiagnosticAt Warning(Loc, S.PDiag(
2157 diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
2158
2159 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2160 }
2161
2162 void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
2163 StringRef TypeName) override {
2164 PartialDiagnosticAt Warning(Loc, S.PDiag(
2165 diag::warn_return_typestate_for_unconsumable_type) << TypeName);
2166
2167 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2168 }
2169
2170 void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2171 StringRef ObservedState) override {
2172
2173 PartialDiagnosticAt Warning(Loc, S.PDiag(
2174 diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
2175
2176 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2177 }
2178
2179 void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
2180 SourceLocation Loc) override {
2181
2182 PartialDiagnosticAt Warning(Loc, S.PDiag(
2183 diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
2184
2185 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2186 }
2187
2188 void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
2189 StringRef State, SourceLocation Loc) override {
2190
2191 PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
2192 MethodName << VariableName << State);
2193
2194 Warnings.emplace_back(args: std::move(Warning), args: OptionalNotes());
2195 }
2196};
2197} // anonymous namespace
2198} // namespace consumed
2199} // namespace clang
2200
2201//===----------------------------------------------------------------------===//
2202// Unsafe buffer usage analysis.
2203//===----------------------------------------------------------------------===//
2204
2205namespace {
2206class UnsafeBufferUsageReporter : public UnsafeBufferUsageHandler {
2207 Sema &S;
2208 bool SuggestSuggestions; // Recommend -fsafe-buffer-usage-suggestions?
2209
2210 // Lists as a string the names of variables in `VarGroupForVD` except for `VD`
2211 // itself:
2212 std::string listVariableGroupAsString(
2213 const VarDecl *VD, const ArrayRef<const VarDecl *> &VarGroupForVD) const {
2214 if (VarGroupForVD.size() <= 1)
2215 return "";
2216
2217 std::vector<StringRef> VarNames;
2218 auto PutInQuotes = [](StringRef S) -> std::string {
2219 return "'" + S.str() + "'";
2220 };
2221
2222 for (auto *V : VarGroupForVD) {
2223 if (V == VD)
2224 continue;
2225 VarNames.push_back(V->getName());
2226 }
2227 if (VarNames.size() == 1) {
2228 return PutInQuotes(VarNames[0]);
2229 }
2230 if (VarNames.size() == 2) {
2231 return PutInQuotes(VarNames[0]) + " and " + PutInQuotes(VarNames[1]);
2232 }
2233 assert(VarGroupForVD.size() > 3);
2234 const unsigned N = VarNames.size() -
2235 2; // need to print the last two names as "..., X, and Y"
2236 std::string AllVars = "";
2237
2238 for (unsigned I = 0; I < N; ++I)
2239 AllVars.append(str: PutInQuotes(VarNames[I]) + ", ");
2240 AllVars.append(str: PutInQuotes(VarNames[N]) + ", and " +
2241 PutInQuotes(VarNames[N + 1]));
2242 return AllVars;
2243 }
2244
2245public:
2246 UnsafeBufferUsageReporter(Sema &S, bool SuggestSuggestions)
2247 : S(S), SuggestSuggestions(SuggestSuggestions) {}
2248
2249 void handleUnsafeOperation(const Stmt *Operation, bool IsRelatedToDecl,
2250 ASTContext &Ctx) override {
2251 SourceLocation Loc;
2252 SourceRange Range;
2253 unsigned MsgParam = 0;
2254 NamedDecl *D = nullptr;
2255 if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Val: Operation)) {
2256 Loc = ASE->getBase()->getExprLoc();
2257 Range = ASE->getBase()->getSourceRange();
2258 MsgParam = 2;
2259 } else if (const auto *BO = dyn_cast<BinaryOperator>(Val: Operation)) {
2260 BinaryOperator::Opcode Op = BO->getOpcode();
2261 if (Op == BO_Add || Op == BO_AddAssign || Op == BO_Sub ||
2262 Op == BO_SubAssign) {
2263 if (BO->getRHS()->getType()->isIntegerType()) {
2264 Loc = BO->getLHS()->getExprLoc();
2265 Range = BO->getLHS()->getSourceRange();
2266 } else {
2267 Loc = BO->getRHS()->getExprLoc();
2268 Range = BO->getRHS()->getSourceRange();
2269 }
2270 MsgParam = 1;
2271 }
2272 } else if (const auto *UO = dyn_cast<UnaryOperator>(Val: Operation)) {
2273 UnaryOperator::Opcode Op = UO->getOpcode();
2274 if (Op == UO_PreInc || Op == UO_PreDec || Op == UO_PostInc ||
2275 Op == UO_PostDec) {
2276 Loc = UO->getSubExpr()->getExprLoc();
2277 Range = UO->getSubExpr()->getSourceRange();
2278 MsgParam = 1;
2279 }
2280 } else {
2281 if (isa<CallExpr>(Val: Operation) || isa<CXXConstructExpr>(Val: Operation)) {
2282 // note_unsafe_buffer_operation doesn't have this mode yet.
2283 assert(!IsRelatedToDecl && "Not implemented yet!");
2284 MsgParam = 3;
2285 } else if (isa<MemberExpr>(Val: Operation)) {
2286 // note_unsafe_buffer_operation doesn't have this mode yet.
2287 assert(!IsRelatedToDecl && "Not implemented yet!");
2288 auto *ME = cast<MemberExpr>(Val: Operation);
2289 D = ME->getMemberDecl();
2290 MsgParam = 5;
2291 } else if (const auto *ECE = dyn_cast<ExplicitCastExpr>(Val: Operation)) {
2292 QualType destType = ECE->getType();
2293 bool destTypeComplete = true;
2294
2295 if (!isa<PointerType>(Val: destType))
2296 return;
2297 destType = destType.getTypePtr()->getPointeeType();
2298 if (const auto *D = destType->getAsTagDecl())
2299 destTypeComplete = D->isCompleteDefinition();
2300
2301 // If destination type is incomplete, it is unsafe to cast to anyway, no
2302 // need to check its type:
2303 if (destTypeComplete) {
2304 const uint64_t dSize = Ctx.getTypeSize(T: destType);
2305 QualType srcType = ECE->getSubExpr()->getType();
2306
2307 assert(srcType->isPointerType());
2308
2309 const uint64_t sSize =
2310 Ctx.getTypeSize(T: srcType.getTypePtr()->getPointeeType());
2311
2312 if (sSize >= dSize)
2313 return;
2314 }
2315 if (const auto *CE = dyn_cast<CXXMemberCallExpr>(
2316 ECE->getSubExpr()->IgnoreParens())) {
2317 D = CE->getMethodDecl();
2318 }
2319
2320 if (!D)
2321 return;
2322
2323 MsgParam = 4;
2324 }
2325 Loc = Operation->getBeginLoc();
2326 Range = Operation->getSourceRange();
2327 }
2328 if (IsRelatedToDecl) {
2329 assert(!SuggestSuggestions &&
2330 "Variables blamed for unsafe buffer usage without suggestions!");
2331 S.Diag(Loc, diag::note_unsafe_buffer_operation) << MsgParam << Range;
2332 } else {
2333 if (D) {
2334 S.Diag(Loc, diag::warn_unsafe_buffer_operation)
2335 << MsgParam << D << Range;
2336 } else {
2337 S.Diag(Loc, diag::warn_unsafe_buffer_operation) << MsgParam << Range;
2338 }
2339 if (SuggestSuggestions) {
2340 S.Diag(Loc, diag::note_safe_buffer_usage_suggestions_disabled);
2341 }
2342 }
2343 }
2344
2345 void handleUnsafeLibcCall(const CallExpr *Call, unsigned PrintfInfo,
2346 ASTContext &Ctx,
2347 const Expr *UnsafeArg = nullptr) override {
2348 S.Diag(Call->getBeginLoc(), diag::warn_unsafe_buffer_libc_call)
2349 << Call->getDirectCallee() // We've checked there is a direct callee
2350 << Call->getSourceRange();
2351 if (PrintfInfo > 0) {
2352 SourceRange R =
2353 UnsafeArg ? UnsafeArg->getSourceRange() : Call->getSourceRange();
2354 S.Diag(R.getBegin(), diag::note_unsafe_buffer_printf_call)
2355 << PrintfInfo << R;
2356 }
2357 }
2358
2359 void handleUnsafeOperationInContainer(const Stmt *Operation,
2360 bool IsRelatedToDecl,
2361 ASTContext &Ctx) override {
2362 SourceLocation Loc;
2363 SourceRange Range;
2364 unsigned MsgParam = 0;
2365
2366 // This function only handles SpanTwoParamConstructorGadget so far, which
2367 // always gives a CXXConstructExpr.
2368 const auto *CtorExpr = cast<CXXConstructExpr>(Val: Operation);
2369 Loc = CtorExpr->getLocation();
2370
2371 S.Diag(Loc, diag::warn_unsafe_buffer_usage_in_container);
2372 if (IsRelatedToDecl) {
2373 assert(!SuggestSuggestions &&
2374 "Variables blamed for unsafe buffer usage without suggestions!");
2375 S.Diag(Loc, diag::note_unsafe_buffer_operation) << MsgParam << Range;
2376 }
2377 }
2378
2379 void handleUnsafeVariableGroup(const VarDecl *Variable,
2380 const VariableGroupsManager &VarGrpMgr,
2381 FixItList &&Fixes, const Decl *D,
2382 const FixitStrategy &VarTargetTypes) override {
2383 assert(!SuggestSuggestions &&
2384 "Unsafe buffer usage fixits displayed without suggestions!");
2385 S.Diag(Variable->getLocation(), diag::warn_unsafe_buffer_variable)
2386 << Variable << (Variable->getType()->isPointerType() ? 0 : 1)
2387 << Variable->getSourceRange();
2388 if (!Fixes.empty()) {
2389 assert(isa<NamedDecl>(D) &&
2390 "Fix-its are generated only for `NamedDecl`s");
2391 const NamedDecl *ND = cast<NamedDecl>(Val: D);
2392 bool BriefMsg = false;
2393 // If the variable group involves parameters, the diagnostic message will
2394 // NOT explain how the variables are grouped as the reason is non-trivial
2395 // and irrelavant to users' experience:
2396 const auto VarGroupForVD = VarGrpMgr.getGroupOfVar(Var: Variable, HasParm: &BriefMsg);
2397 unsigned FixItStrategy = 0;
2398 switch (VarTargetTypes.lookup(VD: Variable)) {
2399 case clang::FixitStrategy::Kind::Span:
2400 FixItStrategy = 0;
2401 break;
2402 case clang::FixitStrategy::Kind::Array:
2403 FixItStrategy = 1;
2404 break;
2405 default:
2406 assert(false && "We support only std::span and std::array");
2407 };
2408
2409 const auto &FD =
2410 S.Diag(Variable->getLocation(),
2411 BriefMsg ? diag::note_unsafe_buffer_variable_fixit_together
2412 : diag::note_unsafe_buffer_variable_fixit_group);
2413
2414 FD << Variable << FixItStrategy;
2415 FD << listVariableGroupAsString(VD: Variable, VarGroupForVD)
2416 << (VarGroupForVD.size() > 1) << ND;
2417 for (const auto &F : Fixes) {
2418 FD << F;
2419 }
2420 }
2421
2422#ifndef NDEBUG
2423 if (areDebugNotesRequested())
2424 for (const DebugNote &Note: DebugNotesByVar[Variable])
2425 S.Diag(Note.first, diag::note_safe_buffer_debug_mode) << Note.second;
2426#endif
2427 }
2428
2429 bool isSafeBufferOptOut(const SourceLocation &Loc) const override {
2430 return S.PP.isSafeBufferOptOut(SourceMgr: S.getSourceManager(), Loc);
2431 }
2432
2433 bool ignoreUnsafeBufferInContainer(const SourceLocation &Loc) const override {
2434 return S.Diags.isIgnored(diag::warn_unsafe_buffer_usage_in_container, Loc);
2435 }
2436
2437 bool ignoreUnsafeBufferInLibcCall(const SourceLocation &Loc) const override {
2438 return S.Diags.isIgnored(diag::warn_unsafe_buffer_libc_call, Loc);
2439 }
2440
2441 // Returns the text representation of clang::unsafe_buffer_usage attribute.
2442 // `WSSuffix` holds customized "white-space"s, e.g., newline or whilespace
2443 // characters.
2444 std::string
2445 getUnsafeBufferUsageAttributeTextAt(SourceLocation Loc,
2446 StringRef WSSuffix = "") const override {
2447 Preprocessor &PP = S.getPreprocessor();
2448 TokenValue ClangUnsafeBufferUsageTokens[] = {
2449 tok::l_square,
2450 tok::l_square,
2451 PP.getIdentifierInfo(Name: "clang"),
2452 tok::coloncolon,
2453 PP.getIdentifierInfo(Name: "unsafe_buffer_usage"),
2454 tok::r_square,
2455 tok::r_square};
2456
2457 StringRef MacroName;
2458
2459 // The returned macro (it returns) is guaranteed not to be function-like:
2460 MacroName = PP.getLastMacroWithSpelling(Loc, Tokens: ClangUnsafeBufferUsageTokens);
2461 if (MacroName.empty())
2462 MacroName = "[[clang::unsafe_buffer_usage]]";
2463 return MacroName.str() + WSSuffix.str();
2464 }
2465};
2466} // namespace
2467
2468//===----------------------------------------------------------------------===//
2469// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
2470// warnings on a function, method, or block.
2471//===----------------------------------------------------------------------===//
2472
2473sema::AnalysisBasedWarnings::Policy::Policy() {
2474 enableCheckFallThrough = 1;
2475 enableCheckUnreachable = 0;
2476 enableThreadSafetyAnalysis = 0;
2477 enableConsumedAnalysis = 0;
2478}
2479
/// InterProceduralData aims to be a storage of whatever data should be passed
/// between analyses of different functions.
///
/// At the moment, its primary goal is to make the information gathered during
/// the analysis of the blocks available during the analysis of the enclosing
/// function. This is important due to the fact that blocks are analyzed before
/// the enclosed function is even parsed fully, so it is not viable to access
/// anything in the outer scope while analyzing the block. On the other hand,
/// re-building CFG for blocks and re-analyzing them when we do have all the
/// information (i.e. during the analysis of the enclosing function) seems to be
/// ill-designed.
class sema::AnalysisBasedWarnings::InterProceduralData {
public:
  // It is important to analyze blocks within functions because it's a very
  // common pattern to capture completion handler parameters by blocks.
  // Stores which parameters were observed to be called once inside blocks.
  CalledOnceInterProceduralData CalledOnceData;
};
2497
2498template <typename... Ts>
2499static bool areAnyEnabled(DiagnosticsEngine &D, SourceLocation Loc,
2500 Ts... Diags) {
2501 return (!D.isIgnored(DiagID: Diags, Loc) || ...);
2502}
2503
// All statistics counters start at zero; they are accumulated across the
// analyses run on each function.
sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
    : S(s), IPData(std::make_unique<InterProceduralData>()),
      NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
      MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
      NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
      NumUninitAnalysisBlockVisits(0),
      MaxUninitAnalysisBlockVisitsPerFunction(0) {
}
2512
// We need this here for unique_ptr with forward declared class
// (InterProceduralData's destructor must be visible at this point).
sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;
2515
2516sema::AnalysisBasedWarnings::Policy
2517sema::AnalysisBasedWarnings::getPolicyInEffectAt(SourceLocation Loc) {
2518 using namespace diag;
2519 DiagnosticsEngine &D = S.getDiagnostics();
2520 Policy P;
2521
2522 // Note: The enabled checks should be kept in sync with the switch in
2523 // SemaPPCallbacks::PragmaDiagnostic().
2524 P.enableCheckUnreachable =
2525 PolicyOverrides.enableCheckUnreachable ||
2526 areAnyEnabled(D, Loc, warn_unreachable, warn_unreachable_break,
2527 warn_unreachable_return, warn_unreachable_loop_increment);
2528
2529 P.enableThreadSafetyAnalysis = PolicyOverrides.enableThreadSafetyAnalysis ||
2530 areAnyEnabled(D, Loc, warn_double_lock);
2531
2532 P.enableConsumedAnalysis = PolicyOverrides.enableConsumedAnalysis ||
2533 areAnyEnabled(D, Loc, warn_use_in_invalid_state);
2534 return P;
2535}
2536
2537void sema::AnalysisBasedWarnings::clearOverrides() {
2538 PolicyOverrides.enableCheckUnreachable = false;
2539 PolicyOverrides.enableConsumedAnalysis = false;
2540 PolicyOverrides.enableThreadSafetyAnalysis = false;
2541}
2542
// Emit the diagnostics that were deferred as "possibly unreachable" without
// running the reachability analysis (used when errors preclude analysis).
static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
  for (const auto &D : fscope->PossiblyUnreachableDiags)
    S.Diag(D.Loc, D.PD);
}
2547
2548// An AST Visitor that calls a callback function on each callable DEFINITION
2549// that is NOT in a dependent context:
2550class CallableVisitor : public DynamicRecursiveASTVisitor {
2551private:
2552 llvm::function_ref<void(const Decl *)> Callback;
2553 const Module *const TUModule;
2554
2555public:
2556 CallableVisitor(llvm::function_ref<void(const Decl *)> Callback,
2557 const Module *const TUModule)
2558 : Callback(Callback), TUModule(TUModule) {
2559 ShouldVisitTemplateInstantiations = true;
2560 ShouldVisitImplicitCode = false;
2561 }
2562
2563 bool TraverseDecl(Decl *Node) override {
2564 // For performance reasons, only validate the current translation unit's
2565 // module, and not modules it depends on.
2566 // See https://issues.chromium.org/issues/351909443 for details.
2567 if (Node && Node->getOwningModule() == TUModule)
2568 return DynamicRecursiveASTVisitor::TraverseDecl(Node);
2569 return true;
2570 }
2571
2572 bool VisitFunctionDecl(FunctionDecl *Node) override {
2573 if (cast<DeclContext>(Val: Node)->isDependentContext())
2574 return true; // Not to analyze dependent decl
2575 // `FunctionDecl->hasBody()` returns true if the function has a body
2576 // somewhere defined. But we want to know if this `Node` has a body
2577 // child. So we use `doesThisDeclarationHaveABody`:
2578 if (Node->doesThisDeclarationHaveABody())
2579 Callback(Node);
2580 return true;
2581 }
2582
2583 bool VisitBlockDecl(BlockDecl *Node) override {
2584 if (cast<DeclContext>(Val: Node)->isDependentContext())
2585 return true; // Not to analyze dependent decl
2586 Callback(Node);
2587 return true;
2588 }
2589
2590 bool VisitObjCMethodDecl(ObjCMethodDecl *Node) override {
2591 if (cast<DeclContext>(Val: Node)->isDependentContext())
2592 return true; // Not to analyze dependent decl
2593 if (Node->hasBody())
2594 Callback(Node);
2595 return true;
2596 }
2597
2598 bool VisitLambdaExpr(LambdaExpr *Node) override {
2599 return VisitFunctionDecl(Node->getCallOperator());
2600 }
2601};
2602
2603void clang::sema::AnalysisBasedWarnings::IssueWarnings(
2604 TranslationUnitDecl *TU) {
2605 if (!TU)
2606 return; // This is unexpected, give up quietly.
2607
2608 DiagnosticsEngine &Diags = S.getDiagnostics();
2609
2610 if (S.hasUncompilableErrorOccurred() || Diags.getIgnoreAllWarnings())
2611 // exit if having uncompilable errors or ignoring all warnings:
2612 return;
2613
2614 DiagnosticOptions &DiagOpts = Diags.getDiagnosticOptions();
2615
2616 // UnsafeBufferUsage analysis settings.
2617 bool UnsafeBufferUsageCanEmitSuggestions = S.getLangOpts().CPlusPlus20;
2618 bool UnsafeBufferUsageShouldEmitSuggestions = // Should != Can.
2619 UnsafeBufferUsageCanEmitSuggestions &&
2620 DiagOpts.ShowSafeBufferUsageSuggestions;
2621 bool UnsafeBufferUsageShouldSuggestSuggestions =
2622 UnsafeBufferUsageCanEmitSuggestions &&
2623 !DiagOpts.ShowSafeBufferUsageSuggestions;
2624 UnsafeBufferUsageReporter R(S, UnsafeBufferUsageShouldSuggestSuggestions);
2625
2626 // The Callback function that performs analyses:
2627 auto CallAnalyzers = [&](const Decl *Node) -> void {
2628 if (Node->hasAttr<UnsafeBufferUsageAttr>())
2629 return;
2630
2631 // Perform unsafe buffer usage analysis:
2632 if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation,
2633 Node->getBeginLoc()) ||
2634 !Diags.isIgnored(diag::warn_unsafe_buffer_variable,
2635 Node->getBeginLoc()) ||
2636 !Diags.isIgnored(diag::warn_unsafe_buffer_usage_in_container,
2637 Node->getBeginLoc()) ||
2638 !Diags.isIgnored(diag::warn_unsafe_buffer_libc_call,
2639 Node->getBeginLoc())) {
2640 clang::checkUnsafeBufferUsage(D: Node, Handler&: R,
2641 EmitSuggestions: UnsafeBufferUsageShouldEmitSuggestions);
2642 }
2643
2644 // More analysis ...
2645 };
2646 // Emit per-function analysis-based warnings that require the whole-TU
2647 // reasoning. Check if any of them is enabled at all before scanning the AST:
2648 if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation, SourceLocation()) ||
2649 !Diags.isIgnored(diag::warn_unsafe_buffer_variable, SourceLocation()) ||
2650 !Diags.isIgnored(diag::warn_unsafe_buffer_usage_in_container,
2651 SourceLocation()) ||
2652 (!Diags.isIgnored(diag::warn_unsafe_buffer_libc_call, SourceLocation()) &&
2653 S.getLangOpts().CPlusPlus /* only warn about libc calls in C++ */)) {
2654 CallableVisitor(CallAnalyzers, TU->getOwningModule())
2655 .TraverseTranslationUnitDecl(TU);
2656 }
2657}
2658
2659void clang::sema::AnalysisBasedWarnings::IssueWarnings(
2660 sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
2661 const Decl *D, QualType BlockType) {
2662
2663 // We avoid doing analysis-based warnings when there are errors for
2664 // two reasons:
2665 // (1) The CFGs often can't be constructed (if the body is invalid), so
2666 // don't bother trying.
2667 // (2) The code already has problems; running the analysis just takes more
2668 // time.
2669 DiagnosticsEngine &Diags = S.getDiagnostics();
2670
2671 // Do not do any analysis if we are going to just ignore them.
2672 if (Diags.getIgnoreAllWarnings() ||
2673 (Diags.getSuppressSystemWarnings() &&
2674 S.SourceMgr.isInSystemHeader(Loc: D->getLocation())))
2675 return;
2676
2677 // For code in dependent contexts, we'll do this at instantiation time.
2678 if (cast<DeclContext>(Val: D)->isDependentContext())
2679 return;
2680
2681 if (S.hasUncompilableErrorOccurred()) {
2682 // Flush out any possibly unreachable diagnostics.
2683 flushDiagnostics(S, fscope);
2684 return;
2685 }
2686
2687 const Stmt *Body = D->getBody();
2688 assert(Body);
2689
2690 // Construct the analysis context with the specified CFG build options.
2691 AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);
2692
2693 // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
2694 // explosion for destructors that can result and the compile time hit.
2695 AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
2696 AC.getCFGBuildOptions().AddEHEdges = false;
2697 AC.getCFGBuildOptions().AddInitializers = true;
2698 AC.getCFGBuildOptions().AddImplicitDtors = true;
2699 AC.getCFGBuildOptions().AddTemporaryDtors = true;
2700 AC.getCFGBuildOptions().AddCXXNewAllocator = false;
2701 AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;
2702
2703 // Force that certain expressions appear as CFGElements in the CFG. This
2704 // is used to speed up various analyses.
2705 // FIXME: This isn't the right factoring. This is here for initial
2706 // prototyping, but we need a way for analyses to say what expressions they
2707 // expect to always be CFGElements and then fill in the BuildOptions
2708 // appropriately. This is essentially a layering violation.
2709 if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
2710 P.enableConsumedAnalysis) {
2711 // Unreachable code analysis and thread safety require a linearized CFG.
2712 AC.getCFGBuildOptions().setAllAlwaysAdd();
2713 }
2714 else {
2715 AC.getCFGBuildOptions()
2716 .setAlwaysAdd(Stmt::BinaryOperatorClass)
2717 .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
2718 .setAlwaysAdd(Stmt::BlockExprClass)
2719 .setAlwaysAdd(Stmt::CStyleCastExprClass)
2720 .setAlwaysAdd(Stmt::DeclRefExprClass)
2721 .setAlwaysAdd(Stmt::ImplicitCastExprClass)
2722 .setAlwaysAdd(Stmt::UnaryOperatorClass);
2723 }
2724
2725 // Install the logical handler.
2726 std::optional<LogicalErrorHandler> LEH;
2727 if (LogicalErrorHandler::hasActiveDiagnostics(Diags, Loc: D->getBeginLoc())) {
2728 LEH.emplace(args&: S);
2729 AC.getCFGBuildOptions().Observer = &*LEH;
2730 }
2731
2732 // Emit delayed diagnostics.
2733 if (!fscope->PossiblyUnreachableDiags.empty()) {
2734 bool analyzed = false;
2735
2736 // Register the expressions with the CFGBuilder.
2737 for (const auto &D : fscope->PossiblyUnreachableDiags) {
2738 for (const Stmt *S : D.Stmts)
2739 AC.registerForcedBlockExpression(stmt: S);
2740 }
2741
2742 if (AC.getCFG()) {
2743 analyzed = true;
2744 for (const auto &D : fscope->PossiblyUnreachableDiags) {
2745 bool AllReachable = true;
2746 for (const Stmt *S : D.Stmts) {
2747 const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt: S);
2748 CFGReverseBlockReachabilityAnalysis *cra =
2749 AC.getCFGReachablityAnalysis();
2750 // FIXME: We should be able to assert that block is non-null, but
2751 // the CFG analysis can skip potentially-evaluated expressions in
2752 // edge cases; see test/Sema/vla-2.c.
2753 if (block && cra) {
2754 // Can this block be reached from the entrance?
2755 if (!cra->isReachable(Src: &AC.getCFG()->getEntry(), Dst: block)) {
2756 AllReachable = false;
2757 break;
2758 }
2759 }
2760 // If we cannot map to a basic block, assume the statement is
2761 // reachable.
2762 }
2763
2764 if (AllReachable)
2765 S.Diag(D.Loc, D.PD);
2766 }
2767 }
2768
2769 if (!analyzed)
2770 flushDiagnostics(S, fscope);
2771 }
2772
2773 // Warning: check missing 'return'
2774 if (P.enableCheckFallThrough) {
2775 const CheckFallThroughDiagnostics &CD =
2776 (isa<BlockDecl>(Val: D) ? CheckFallThroughDiagnostics::MakeForBlock()
2777 : (isa<CXXMethodDecl>(Val: D) &&
2778 cast<CXXMethodDecl>(Val: D)->getOverloadedOperator() == OO_Call &&
2779 cast<CXXMethodDecl>(Val: D)->getParent()->isLambda())
2780 ? CheckFallThroughDiagnostics::MakeForLambda()
2781 : (fscope->isCoroutine()
2782 ? CheckFallThroughDiagnostics::MakeForCoroutine(Func: D)
2783 : CheckFallThroughDiagnostics::MakeForFunction(S, Func: D)));
2784 CheckFallThroughForBody(S, D, Body, BlockType, CD, AC);
2785 }
2786
2787 // Warning: check for unreachable code
2788 if (P.enableCheckUnreachable) {
2789 // Only check for unreachable code on non-template instantiations.
2790 // Different template instantiations can effectively change the control-flow
2791 // and it is very difficult to prove that a snippet of code in a template
2792 // is unreachable for all instantiations.
2793 bool isTemplateInstantiation = false;
2794 if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Val: D))
2795 isTemplateInstantiation = Function->isTemplateInstantiation();
2796 if (!isTemplateInstantiation)
2797 CheckUnreachable(S, AC);
2798 }
2799
2800 // Check for thread safety violations
2801 if (P.enableThreadSafetyAnalysis) {
2802 SourceLocation FL = AC.getDecl()->getLocation();
2803 SourceLocation FEL = AC.getDecl()->getEndLoc();
2804 threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
2805 if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
2806 Reporter.setIssueBetaWarnings(true);
2807 if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
2808 Reporter.setVerbose(true);
2809
2810 threadSafety::runThreadSafetyAnalysis(AC, Handler&: Reporter,
2811 Bset: &S.ThreadSafetyDeclCache);
2812 Reporter.emitDiagnostics();
2813 }
2814
2815 // Check for violations of consumed properties.
2816 if (P.enableConsumedAnalysis) {
2817 consumed::ConsumedWarningsHandler WarningHandler(S);
2818 consumed::ConsumedAnalyzer Analyzer(WarningHandler);
2819 Analyzer.run(AC);
2820 }
2821
2822 if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
2823 !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
2824 !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc()) ||
2825 !Diags.isIgnored(diag::warn_uninit_const_reference, D->getBeginLoc())) {
2826 if (CFG *cfg = AC.getCFG()) {
2827 UninitValsDiagReporter reporter(S);
2828 UninitVariablesAnalysisStats stats;
2829 std::memset(s: &stats, c: 0, n: sizeof(UninitVariablesAnalysisStats));
2830 runUninitializedVariablesAnalysis(dc: *cast<DeclContext>(Val: D), cfg: *cfg, ac&: AC,
2831 handler&: reporter, stats);
2832
2833 if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
2834 ++NumUninitAnalysisFunctions;
2835 NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
2836 NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
2837 MaxUninitAnalysisVariablesPerFunction =
2838 std::max(a: MaxUninitAnalysisVariablesPerFunction,
2839 b: stats.NumVariablesAnalyzed);
2840 MaxUninitAnalysisBlockVisitsPerFunction =
2841 std::max(a: MaxUninitAnalysisBlockVisitsPerFunction,
2842 b: stats.NumBlockVisits);
2843 }
2844 }
2845 }
2846
2847 // Check for violations of "called once" parameter properties.
2848 if (S.getLangOpts().ObjC && !S.getLangOpts().CPlusPlus &&
2849 shouldAnalyzeCalledOnceParameters(Diags, At: D->getBeginLoc())) {
2850 if (AC.getCFG()) {
2851 CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
2852 checkCalledOnceParameters(
2853 AC, Handler&: Reporter,
2854 CheckConventionalParameters: shouldAnalyzeCalledOnceConventions(Diags, At: D->getBeginLoc()));
2855 }
2856 }
2857
2858 bool FallThroughDiagFull =
2859 !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
2860 bool FallThroughDiagPerFunction = !Diags.isIgnored(
2861 diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
2862 if (FallThroughDiagFull || FallThroughDiagPerFunction ||
2863 fscope->HasFallthroughStmt) {
2864 DiagnoseSwitchLabelsFallthrough(S, AC, PerFunction: !FallThroughDiagFull);
2865 }
2866
2867 if (S.getLangOpts().ObjCWeak &&
2868 !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
2869 diagnoseRepeatedUseOfWeak(S, CurFn: fscope, D, PM: AC.getParentMap());
2870
2871
2872 // Check for infinite self-recursion in functions
2873 if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
2874 D->getBeginLoc())) {
2875 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: D)) {
2876 checkRecursiveFunction(S, FD, Body, AC);
2877 }
2878 }
2879
2880 // Check for throw out of non-throwing function.
2881 if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
2882 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: D))
2883 if (S.getLangOpts().CPlusPlus && !fscope->isCoroutine() && isNoexcept(FD))
2884 checkThrowInNonThrowingFunc(S, FD, AC);
2885
2886 // If none of the previous checks caused a CFG build, trigger one here
2887 // for the logical error handler.
2888 if (LogicalErrorHandler::hasActiveDiagnostics(Diags, Loc: D->getBeginLoc())) {
2889 AC.getCFG();
2890 }
2891
2892 // Clear any of our policy overrides.
2893 clearOverrides();
2894
2895 // Collect statistics about the CFG if it was built.
2896 if (S.CollectStats && AC.isCFGBuilt()) {
2897 ++NumFunctionsAnalyzed;
2898 if (CFG *cfg = AC.getCFG()) {
2899 // If we successfully built a CFG for this context, record some more
2900 // detail information about it.
2901 NumCFGBlocks += cfg->getNumBlockIDs();
2902 MaxCFGBlocksPerFunction = std::max(a: MaxCFGBlocksPerFunction,
2903 b: cfg->getNumBlockIDs());
2904 } else {
2905 ++NumFunctionsWithBadCFGs;
2906 }
2907 }
2908}
2909
2910void clang::sema::AnalysisBasedWarnings::PrintStats() const {
2911 llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
2912
2913 unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
2914 unsigned AvgCFGBlocksPerFunction =
2915 !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
2916 llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
2917 << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
2918 << " " << NumCFGBlocks << " CFG blocks built.\n"
2919 << " " << AvgCFGBlocksPerFunction
2920 << " average CFG blocks per function.\n"
2921 << " " << MaxCFGBlocksPerFunction
2922 << " max CFG blocks per function.\n";
2923
2924 unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
2925 : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
2926 unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
2927 : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
2928 llvm::errs() << NumUninitAnalysisFunctions
2929 << " functions analyzed for uninitialiazed variables\n"
2930 << " " << NumUninitAnalysisVariables << " variables analyzed.\n"
2931 << " " << AvgUninitVariablesPerFunction
2932 << " average variables per function.\n"
2933 << " " << MaxUninitAnalysisVariablesPerFunction
2934 << " max variables per function.\n"
2935 << " " << NumUninitAnalysisBlockVisits << " block visits.\n"
2936 << " " << AvgUninitBlockVisitsPerFunction
2937 << " average block visits per function.\n"
2938 << " " << MaxUninitAnalysisBlockVisitsPerFunction
2939 << " max block visits per function.\n";
2940}
2941

// NOTE(review): The lines below were web-page navigation residue from the
// KDAB code-browser scrape of clang/lib/Sema/AnalysisBasedWarnings.cpp
// ("Provided by KDAB", privacy-policy link, training advertisement, and the
// page caption).  They are not part of the original source file and are
// preserved here only as a comment so the file remains valid C++.