xref: /llvm-project/clang/lib/Sema/AnalysisBasedWarnings.cpp (revision c1e7e4500c6e3b921f5e0cda8ba8d8d66e086db6)
1 //=== AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis ------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file defines analysis_warnings::[Policy,Executor].
10 // Together they are used by Sema to issue warnings based on inexpensive
11 // static analysis algorithms in libAnalysis.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "clang/Sema/AnalysisBasedWarnings.h"
16 #include "clang/AST/Decl.h"
17 #include "clang/AST/DeclCXX.h"
18 #include "clang/AST/DeclObjC.h"
19 #include "clang/AST/DynamicRecursiveASTVisitor.h"
20 #include "clang/AST/EvaluatedExprVisitor.h"
21 #include "clang/AST/Expr.h"
22 #include "clang/AST/ExprCXX.h"
23 #include "clang/AST/ExprObjC.h"
24 #include "clang/AST/OperationKinds.h"
25 #include "clang/AST/ParentMap.h"
26 #include "clang/AST/StmtCXX.h"
27 #include "clang/AST/StmtObjC.h"
28 #include "clang/AST/Type.h"
29 #include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
30 #include "clang/Analysis/Analyses/CalledOnceCheck.h"
31 #include "clang/Analysis/Analyses/Consumed.h"
32 #include "clang/Analysis/Analyses/ReachableCode.h"
33 #include "clang/Analysis/Analyses/ThreadSafety.h"
34 #include "clang/Analysis/Analyses/UninitializedValues.h"
35 #include "clang/Analysis/Analyses/UnsafeBufferUsage.h"
36 #include "clang/Analysis/AnalysisDeclContext.h"
37 #include "clang/Analysis/CFG.h"
38 #include "clang/Analysis/CFGStmtMap.h"
39 #include "clang/Basic/Diagnostic.h"
40 #include "clang/Basic/DiagnosticSema.h"
41 #include "clang/Basic/SourceLocation.h"
42 #include "clang/Basic/SourceManager.h"
43 #include "clang/Lex/Preprocessor.h"
44 #include "clang/Sema/ScopeInfo.h"
45 #include "clang/Sema/SemaInternal.h"
46 #include "llvm/ADT/ArrayRef.h"
47 #include "llvm/ADT/BitVector.h"
48 #include "llvm/ADT/MapVector.h"
49 #include "llvm/ADT/STLFunctionalExtras.h"
50 #include "llvm/ADT/SmallVector.h"
51 #include "llvm/ADT/StringRef.h"
52 #include <algorithm>
53 #include <deque>
54 #include <iterator>
55 #include <optional>
56 
57 using namespace clang;
58 
59 //===----------------------------------------------------------------------===//
60 // Unreachable code analysis.
61 //===----------------------------------------------------------------------===//
62 
63 namespace {
  /// Forwards unreachable-code results from the reachable-code analysis to
  /// Sema, mapping each kind of unreachable statement onto the most specific
  /// warning and emitting a fix-it showing how to silence it.
  class UnreachableCodeHandler : public reachable_code::Callback {
    Sema &S;
    /// Silenceable condition range of the most recently emitted diagnostic;
    /// used to suppress duplicate reports caused by one conditional value.
    SourceRange PreviousSilenceableCondVal;

  public:
    UnreachableCodeHandler(Sema &s) : S(s) {}

    void HandleUnreachable(reachable_code::UnreachableKind UK, SourceLocation L,
                           SourceRange SilenceableCondVal, SourceRange R1,
                           SourceRange R2, bool HasFallThroughAttr) override {
      // If the diagnosed code is `[[fallthrough]];` and
      // `-Wunreachable-code-fallthrough` is enabled, suppress `code will never
      // be executed` warning to avoid generating diagnostic twice
      if (HasFallThroughAttr &&
          !S.getDiagnostics().isIgnored(diag::warn_unreachable_fallthrough_attr,
                                        SourceLocation()))
        return;

      // Avoid reporting multiple unreachable code diagnostics that are
      // triggered by the same conditional value.
      if (PreviousSilenceableCondVal.isValid() &&
          SilenceableCondVal.isValid() &&
          PreviousSilenceableCondVal == SilenceableCondVal)
        return;
      PreviousSilenceableCondVal = SilenceableCondVal;

      // Pick the most specific warning for the kind of unreachable construct.
      unsigned diag = diag::warn_unreachable;
      switch (UK) {
        case reachable_code::UK_Break:
          diag = diag::warn_unreachable_break;
          break;
        case reachable_code::UK_Return:
          diag = diag::warn_unreachable_return;
          break;
        case reachable_code::UK_Loop_Increment:
          diag = diag::warn_unreachable_loop_increment;
          break;
        case reachable_code::UK_Other:
          break;
      }

      S.Diag(L, diag) << R1 << R2;

      // If the analysis identified a condition value responsible for the
      // unreachability, suggest wrapping it in "/* DISABLES CODE */ ( ... )"
      // to silence the warning.
      SourceLocation Open = SilenceableCondVal.getBegin();
      if (Open.isValid()) {
        SourceLocation Close = SilenceableCondVal.getEnd();
        Close = S.getLocForEndOfToken(Close);
        if (Close.isValid()) {
          S.Diag(Open, diag::note_unreachable_silence)
            << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
            << FixItHint::CreateInsertion(Close, ")");
        }
      }
    }
  };
119 } // anonymous namespace
120 
121 /// CheckUnreachable - Check for unreachable code.
122 static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
123   // As a heuristic prune all diagnostics not in the main file.  Currently
124   // the majority of warnings in headers are false positives.  These
125   // are largely caused by configuration state, e.g. preprocessor
126   // defined code, etc.
127   //
128   // Note that this is also a performance optimization.  Analyzing
129   // headers many times can be expensive.
130   if (!S.getSourceManager().isInMainFile(AC.getDecl()->getBeginLoc()))
131     return;
132 
133   UnreachableCodeHandler UC(S);
134   reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
135 }
136 
137 namespace {
138 /// Warn on logical operator errors in CFGBuilder
139 class LogicalErrorHandler : public CFGCallback {
140   Sema &S;
141 
142 public:
143   LogicalErrorHandler(Sema &S) : S(S) {}
144 
145   static bool HasMacroID(const Expr *E) {
146     if (E->getExprLoc().isMacroID())
147       return true;
148 
149     // Recurse to children.
150     for (const Stmt *SubStmt : E->children())
151       if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
152         if (HasMacroID(SubExpr))
153           return true;
154 
155     return false;
156   }
157 
158   void logicAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
159     if (HasMacroID(B))
160       return;
161 
162     unsigned DiagID = isAlwaysTrue
163                           ? diag::warn_tautological_negation_or_compare
164                           : diag::warn_tautological_negation_and_compare;
165     SourceRange DiagRange = B->getSourceRange();
166     S.Diag(B->getExprLoc(), DiagID) << DiagRange;
167   }
168 
169   void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
170     if (HasMacroID(B))
171       return;
172 
173     SourceRange DiagRange = B->getSourceRange();
174     S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
175         << DiagRange << isAlwaysTrue;
176   }
177 
178   void compareBitwiseEquality(const BinaryOperator *B,
179                               bool isAlwaysTrue) override {
180     if (HasMacroID(B))
181       return;
182 
183     SourceRange DiagRange = B->getSourceRange();
184     S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
185         << DiagRange << isAlwaysTrue;
186   }
187 
188   void compareBitwiseOr(const BinaryOperator *B) override {
189     if (HasMacroID(B))
190       return;
191 
192     SourceRange DiagRange = B->getSourceRange();
193     S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_or) << DiagRange;
194   }
195 
196   static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
197                                    SourceLocation Loc) {
198     return !Diags.isIgnored(diag::warn_tautological_overlap_comparison, Loc) ||
199            !Diags.isIgnored(diag::warn_comparison_bitwise_or, Loc) ||
200            !Diags.isIgnored(diag::warn_tautological_negation_and_compare, Loc);
201   }
202 };
203 } // anonymous namespace
204 
205 //===----------------------------------------------------------------------===//
206 // Check for infinite self-recursion in functions
207 //===----------------------------------------------------------------------===//
208 
// Returns true if the function is called anywhere within the CFGBlock.
// For member functions, the additional condition of being called from the
// this pointer is required.
static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
  // Process all the Stmt's in this block to find any calls to FD.
  for (const auto &B : Block) {
    if (B.getKind() != CFGElement::Statement)
      continue;

    // Only interested in direct calls whose callee canonically is FD.
    const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
    if (!CE || !CE->getCalleeDecl() ||
        CE->getCalleeDecl()->getCanonicalDecl() != FD)
      continue;

    // Skip function calls which are qualified with a templated class.
    // Such calls may resolve to a different specialization at instantiation
    // time, so they are not necessarily recursive.
    if (const DeclRefExpr *DRE =
            dyn_cast<DeclRefExpr>(CE->getCallee()->IgnoreParenImpCasts())) {
      if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
        if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
            isa<TemplateSpecializationType>(NNS->getAsType())) {
          continue;
        }
      }
    }

    // Count the call as recursive unless it is a virtual member call made
    // through an object other than 'this' (which may dispatch to an
    // override rather than back into FD).
    const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE);
    if (!MCE || isa<CXXThisExpr>(MCE->getImplicitObjectArgument()) ||
        !MCE->getMethodDecl()->isVirtual())
      return true;
  }
  return false;
}
241 
242 // Returns true if every path from the entry block passes through a call to FD.
243 static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
244   llvm::SmallPtrSet<CFGBlock *, 16> Visited;
245   llvm::SmallVector<CFGBlock *, 16> WorkList;
246   // Keep track of whether we found at least one recursive path.
247   bool foundRecursion = false;
248 
249   const unsigned ExitID = cfg->getExit().getBlockID();
250 
251   // Seed the work list with the entry block.
252   WorkList.push_back(&cfg->getEntry());
253 
254   while (!WorkList.empty()) {
255     CFGBlock *Block = WorkList.pop_back_val();
256 
257     for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
258       if (CFGBlock *SuccBlock = *I) {
259         if (!Visited.insert(SuccBlock).second)
260           continue;
261 
262         // Found a path to the exit node without a recursive call.
263         if (ExitID == SuccBlock->getBlockID())
264           return false;
265 
266         // If the successor block contains a recursive call, end analysis there.
267         if (hasRecursiveCallInPath(FD, *SuccBlock)) {
268           foundRecursion = true;
269           continue;
270         }
271 
272         WorkList.push_back(SuccBlock);
273       }
274     }
275   }
276   return foundRecursion;
277 }
278 
279 static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
280                                    const Stmt *Body, AnalysisDeclContext &AC) {
281   FD = FD->getCanonicalDecl();
282 
283   // Only run on non-templated functions and non-templated members of
284   // templated classes.
285   if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
286       FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
287     return;
288 
289   CFG *cfg = AC.getCFG();
290   if (!cfg) return;
291 
292   // If the exit block is unreachable, skip processing the function.
293   if (cfg->getExit().pred_empty())
294     return;
295 
296   // Emit diagnostic if a recursive function call is detected for all paths.
297   if (checkForRecursiveFunctionCall(FD, cfg))
298     S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
299 }
300 
301 //===----------------------------------------------------------------------===//
302 // Check for throw in a non-throwing function.
303 //===----------------------------------------------------------------------===//
304 
/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
/// can reach ExitBlock.
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
                         CFG *Body) {
  // Depth-first worklist over CFG successors; Queued doubles as the
  // visited set so each block is pushed at most once.
  SmallVector<CFGBlock *, 16> Stack;
  llvm::BitVector Queued(Body->getNumBlockIDs());

  Stack.push_back(&ThrowBlock);
  Queued[ThrowBlock.getBlockID()] = true;

  while (!Stack.empty()) {
    CFGBlock &UnwindBlock = *Stack.back();
    Stack.pop_back();

    for (auto &Succ : UnwindBlock.succs()) {
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
        continue;

      // Unwinding reached the function exit: the exception escapes.
      if (Succ->getBlockID() == Body->getExit().getBlockID())
        return true;

      if (auto *Catch =
              dyn_cast_or_null<CXXCatchStmt>(Succ->getLabel())) {
        QualType Caught = Catch->getCaughtType();
        if (Caught.isNull() || // catch (...) catches everything
            !E->getSubExpr() || // throw; is considered caught by any handler
            S.handlerCanCatch(Caught, E->getSubExpr()->getType()))
          // Exception doesn't escape via this path.
          break;
      } else {
        Stack.push_back(Succ);
        Queued[Succ->getBlockID()] = true;
      }
    }
  }

  return false;
}
343 
344 static void visitReachableThrows(
345     CFG *BodyCFG,
346     llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
347   llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
348   clang::reachable_code::ScanReachableFromBlock(&BodyCFG->getEntry(), Reachable);
349   for (CFGBlock *B : *BodyCFG) {
350     if (!Reachable[B->getBlockID()])
351       continue;
352     for (CFGElement &E : *B) {
353       std::optional<CFGStmt> S = E.getAs<CFGStmt>();
354       if (!S)
355         continue;
356       if (auto *Throw = dyn_cast<CXXThrowExpr>(S->getStmt()))
357         Visit(Throw, *B);
358     }
359   }
360 }
361 
362 static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
363                                                  const FunctionDecl *FD) {
364   if (!S.getSourceManager().isInSystemHeader(OpLoc) &&
365       FD->getTypeSourceInfo()) {
366     S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
367     if (S.getLangOpts().CPlusPlus11 &&
368         (isa<CXXDestructorDecl>(FD) ||
369          FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
370          FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
371       if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
372                                          getAs<FunctionProtoType>())
373         S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
374             << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
375             << FD->getExceptionSpecSourceRange();
376     } else
377       S.Diag(FD->getLocation(), diag::note_throw_in_function)
378           << FD->getExceptionSpecSourceRange();
379   }
380 }
381 
382 static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
383                                         AnalysisDeclContext &AC) {
384   CFG *BodyCFG = AC.getCFG();
385   if (!BodyCFG)
386     return;
387   if (BodyCFG->getExit().pred_empty())
388     return;
389   visitReachableThrows(BodyCFG, [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
390     if (throwEscapes(S, Throw, Block, BodyCFG))
391       EmitDiagForCXXThrowInNonThrowingFunc(S, Throw->getThrowLoc(), FD);
392   });
393 }
394 
395 static bool isNoexcept(const FunctionDecl *FD) {
396   const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
397   if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
398     return true;
399   return false;
400 }
401 
402 //===----------------------------------------------------------------------===//
403 // Check for missing return value.
404 //===----------------------------------------------------------------------===//
405 
/// Result of the fall-through analysis performed by CheckFallThrough.
enum ControlFlowKind {
  UnknownFallThrough,       // No CFG available; nothing can be concluded.
  NeverFallThrough,         // Never falls off the end, but may return.
  MaybeFallThrough,         // Might or might not fall off the end.
  AlwaysFallThrough,        // Always falls off the end.
  NeverFallThroughOrReturn  // Neither falls off the end nor returns.
};
413 
/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (!cfg) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (const auto *B : *cfg) {
      if (!live[B->getBlockID()]) {
        if (B->pred_begin() == B->pred_end()) {
          const Stmt *Term = B->getTerminatorStmt();
          if (isa_and_nonnull<CXXTryStmt>(Term))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(B, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit
  // block and look for fall through paths, being careful to ignore normal
  // returns, and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator I =
           cfg->getExit().filtered_pred_start_end(FO);
       I.hasMore(); ++I) {
    const CFGBlock &B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (ri->getAs<CFGStmt>())
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      const Stmt *Term = B.getTerminatorStmt();
      if (Term && (isa<CXXTryStmt>(Term) || isa<ObjCAtTryStmt>(Term))) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    // Classify the last statement-bearing element of the block.
    CFGStmt CS = ri->castAs<CFGStmt>();
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S) || isa<CoreturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    if (!llvm::is_contained(B.succs(), &cfg->getExit())) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
543 
544 namespace {
545 
/// Bundles the diagnostic IDs CheckFallThroughForBody should emit for each
/// fall-through outcome, parameterized by the kind of callable being checked
/// (function, block, lambda, or coroutine). A diagnostic ID of 0 means
/// "don't emit anything for this case".
struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;
  enum { Function, Block, Lambda, Coroutine } funMode;
  // Location used when asking whether a diagnostic group is enabled.
  SourceLocation FuncLoc;

  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    // Don't suggest that template instantiations be marked "noreturn"
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
      isTemplateInstantiation = Function->isTemplateInstantiation();

    if (!isVirtualMethod && !isTemplateInstantiation)
      D.diag_NeverFallThroughOrReturn =
        diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = Function;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn = 0;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
        diag::warn_maybe_falloff_nonvoid_coroutine;
    D.diag_AlwaysFallThrough_HasNoReturn = 0;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
        diag::warn_falloff_nonvoid_coroutine;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Coroutine;
    return D;
  }

  // Falling off a value-returning block is an error, not a warning.
  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Block;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForLambda() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_lambda;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_lambda;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Lambda;
    return D;
  }

  /// Returns true when every diagnostic that could be emitted for this
  /// callable is disabled, so the caller can skip the CFG analysis.
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode == Function) {
      return (ReturnsVoid ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
                          FuncLoc)) &&
             (!HasNoReturn ||
              D.isIgnored(diag::warn_noreturn_function_has_return_expr,
                          FuncLoc)) &&
             (!ReturnsVoid ||
              D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc));
    }
    if (funMode == Coroutine) {
      return (ReturnsVoid ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function, FuncLoc) ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine,
                          FuncLoc)) &&
             (!HasNoReturn);
    }
    // For blocks / lambdas.
    return ReturnsVoid && !HasNoReturn;
  }
};
655 
656 } // anonymous namespace
657 
/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    QualType BlockType,
                                    const CheckFallThroughDiagnostics &CD,
                                    AnalysisDeclContext &AC,
                                    sema::FunctionScopeInfo *FSI) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;
  bool IsCoroutine = FSI->isCoroutine();

  // Determine ReturnsVoid/HasNoReturn from the kind of declaration being
  // checked: a function (possibly a coroutine body), an Objective-C method,
  // or a block (whose signature lives in BlockType).
  if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Body))
      // A coroutine with a fall-through handler behaves as if returning void.
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
    else
      ReturnsVoid = FD->getReturnType()->isVoidType();
    HasNoReturn = FD->isNoReturn();
  }
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getReturnType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    if (const FunctionType *FT =
          BlockType->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getReturnType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
      return;
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
  // Coroutine diagnostics additionally mention the promise type.
  auto EmitDiag = [&](SourceLocation Loc, unsigned DiagID) {
    if (IsCoroutine)
      S.Diag(Loc, DiagID) << FSI->CoroutinePromise->getType();
    else
      S.Diag(Loc, DiagID);
  };

  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
  if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
    return;

  // Either in a function body compound statement, or a function-try-block.
  switch (CheckFallThrough(AC)) {
    case UnknownFallThrough:
      break;

    case MaybeFallThrough:
      if (HasNoReturn)
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
      else if (!ReturnsVoid)
        EmitDiag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
      break;
    case AlwaysFallThrough:
      if (HasNoReturn)
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
      else if (!ReturnsVoid)
        EmitDiag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
      break;
    case NeverFallThroughOrReturn:
      if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
        if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
        } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
        } else {
          S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
        }
      }
      break;
    case NeverFallThrough:
      break;
  }
}
742 
743 //===----------------------------------------------------------------------===//
744 // -Wuninitialized
745 //===----------------------------------------------------------------------===//
746 
747 namespace {
748 /// ContainsReference - A visitor class to search for references to
749 /// a particular declaration (the needle) within any evaluated component of an
750 /// expression (recursively).
751 class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
752   bool FoundReference;
753   const DeclRefExpr *Needle;
754 
755 public:
756   typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
757 
758   ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
759     : Inherited(Context), FoundReference(false), Needle(Needle) {}
760 
761   void VisitExpr(const Expr *E) {
762     // Stop evaluating if we already have a reference.
763     if (FoundReference)
764       return;
765 
766     Inherited::VisitExpr(E);
767   }
768 
769   void VisitDeclRefExpr(const DeclRefExpr *E) {
770     if (E == Needle)
771       FoundReference = true;
772     else
773       Inherited::VisitDeclRefExpr(E);
774   }
775 
776   bool doesContainReference() const { return FoundReference; }
777 };
778 } // anonymous namespace
779 
780 static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
781   QualType VariableTy = VD->getType().getCanonicalType();
782   if (VariableTy->isBlockPointerType() &&
783       !VD->hasAttr<BlocksAttr>()) {
784     S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
785         << VD->getDeclName()
786         << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
787     return true;
788   }
789 
790   // Don't issue a fixit if there is already an initializer.
791   if (VD->getInit())
792     return false;
793 
794   // Don't suggest a fixit inside macros.
795   if (VD->getEndLoc().isMacroID())
796     return false;
797 
798   SourceLocation Loc = S.getLocForEndOfToken(VD->getEndLoc());
799 
800   // Suggest possible initialization (if any).
801   std::string Init = S.getFixItZeroInitializerForType(VariableTy, Loc);
802   if (Init.empty())
803     return false;
804 
805   S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
806     << FixItHint::CreateInsertion(Loc, Init);
807   return true;
808 }
809 
810 /// Create a fixit to remove an if-like statement, on the assumption that its
811 /// condition is CondVal.
812 static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
813                           const Stmt *Else, bool CondVal,
814                           FixItHint &Fixit1, FixItHint &Fixit2) {
815   if (CondVal) {
816     // If condition is always true, remove all but the 'then'.
817     Fixit1 = FixItHint::CreateRemoval(
818         CharSourceRange::getCharRange(If->getBeginLoc(), Then->getBeginLoc()));
819     if (Else) {
820       SourceLocation ElseKwLoc = S.getLocForEndOfToken(Then->getEndLoc());
821       Fixit2 =
822           FixItHint::CreateRemoval(SourceRange(ElseKwLoc, Else->getEndLoc()));
823     }
824   } else {
825     // If condition is always false, remove all but the 'else'.
826     if (Else)
827       Fixit1 = FixItHint::CreateRemoval(CharSourceRange::getCharRange(
828           If->getBeginLoc(), Else->getBeginLoc()));
829     else
830       Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
831   }
832 }
833 
/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
///
/// For an Always-uninitialized use, emits warn_uninit_var. For AfterDecl /
/// AfterCall uses, emits warn_sometimes_uninit_var at the declaration with a
/// note at the use. For Maybe/Sometimes uses, tries to diagnose each branch
/// whose terminator leads to the uninitialized use (with fix-its that remove
/// the dead condition); if no branch could be diagnosed, falls back to a
/// plain warn_maybe_uninit_var.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  // Set once any branch-specific diagnostic is emitted; guards the fallback.
  bool Diagnosed = false;

  switch (Use.getKind()) {
  case UninitUse::Always:
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
    return;

  case UninitUse::AfterDecl:
  case UninitUse::AfterCall:
    // 4 / 5 select the "after its declaration" / "after a call" wording in
    // the warn_sometimes_uninit_var diagnostic.
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock
      << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
      << const_cast<DeclContext*>(VD->getLexicalDeclContext())
      << VD->getSourceRange();
    S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
    return;

  case UninitUse::Maybe:
  case UninitUse::Sometimes:
    // Carry on to report sometimes-uninitialized branches, if possible,
    // or a 'may be used uninitialized' diagnostic otherwise.
    break;
  }

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;
    StringRef Str;
    SourceRange Range;

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    // DeclStmtClass serves as a "no terminator" sentinel steering into the
    // default case below.
    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
    default:
      // Don't know how to report this. Just fall back to 'may be used
      // uninitialized'. FIXME: Can this happen?
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(
            SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      if (I->Output)
        // An always-true 'for' condition can simply be deleted.
        Fixit1 = FixItHint::CreateRemoval(Range);
      else
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::CXXForRangeStmtClass:
      if (I->Output == 1) {
        // The use occurs if a range-based for loop's body never executes.
        // That may be impossible, and there's no syntactic fix for this,
        // so treat it as a 'may be uninitialized' case.
        continue;
      }
      DiagKind = 1;
      Str = "for";
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
      break;
    }

    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
      << Str << I->Output << Range;
    S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  // No branch could be reported individually: emit the generic 'may be used
  // uninitialized' warning at the use site.
  if (!Diagnosed)
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
}
1002 
1003 /// Diagnose uninitialized const reference usages.
1004 static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
1005                                              const UninitUse &Use) {
1006   S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_const_reference)
1007       << VD->getDeclName() << Use.getUser()->getSourceRange();
1008   return true;
1009 }
1010 
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable. This manages the different forms of diagnostic
/// emitted for particular types of uses. Returns true if the use was diagnosed
/// as a warning. If a particular use is one we omit warnings for, returns
/// false.
///
/// The use is either a DeclRefExpr (a direct read of the variable) or a
/// BlockExpr (the variable captured by a block); the two are handled on
/// separate paths below.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const UninitUse &Use,
                                     bool alwaysReportSelfInit = false) {
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
    // Inspect the initializer of the variable declaration which is
    // being referenced prior to its initialization. We emit
    // specialized diagnostics for self-initialization, and we
    // specifically avoid warning about self references which take the
    // form of:
    //
    //   int x = x;
    //
    // This is used to indicate to GCC that 'x' is intentionally left
    // uninitialized. Proven code paths which access 'x' in
    // an uninitialized state after this will still warn.
    if (const Expr *Initializer = VD->getInit()) {
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
        return false;

      // The reference is buried somewhere inside the initializer (not the
      // whole initializer itself): report it as a self-reference.
      ContainsReference CR(S.Context, DRE);
      CR.Visit(Initializer);
      if (CR.doesContainReference()) {
        S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
            << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
        return true;
      }
    }

    DiagUninitUse(S, VD, Use, false);
  } else {
    // Not a direct reference: the variable is captured by a block.
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
      S.Diag(BE->getBeginLoc(),
             diag::warn_uninit_byref_blockvar_captured_by_block)
          << VD->getDeclName()
          << VD->getType().getQualifiers().hasObjCLifetime();
    else
      DiagUninitUse(S, VD, Use, true);
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!SuggestInitializationFixit(S, VD))
    S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
        << VD->getDeclName();

  return true;
}
1065 
namespace {
/// Collects fallthrough annotations in a function body and, given its CFG,
/// decides for each switch-case block whether control can fall into it from
/// a predecessor that lacks a [[fallthrough]]-style annotation.
///
/// Usage: TraverseStmt() the body, then fillReachableBlocks() the CFG, then
/// query checkFallThroughIntoBlock() per case block. Annotations that were
/// never consumed by markFallthroughVisited() remain in getFallthroughStmts()
/// and indicate misplaced attributes.
class FallthroughMapper : public DynamicRecursiveASTVisitor {
public:
  FallthroughMapper(Sema &S) : FoundSwitchStatements(false), S(S) {
    // Types carry no fallthrough annotations; skip them during traversal.
    ShouldWalkTypesOfTypeLocs = false;
  }

  /// True if the traversed body contained at least one switch statement.
  bool foundSwitchStatements() const { return FoundSwitchStatements; }

  /// Remove an annotation from the pending set once a fallthrough edge has
  /// been matched to it. Must only be called for a recorded annotation.
  void markFallthroughVisited(const AttributedStmt *Stmt) {
    bool Found = FallthroughStmts.erase(Stmt);
    assert(Found);
    (void)Found;
  }

  typedef llvm::SmallPtrSet<const AttributedStmt *, 8> AttrStmts;

  /// Annotations not (yet) matched to any fallthrough edge.
  const AttrStmts &getFallthroughStmts() const { return FallthroughStmts; }

  /// Compute the set of CFG blocks reachable from the entry block (plus all
  /// case-labelled blocks, treated as reachable a priori).
  void fillReachableBlocks(CFG *Cfg) {
    assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
    std::deque<const CFGBlock *> BlockQueue;

    ReachableBlocks.insert(&Cfg->getEntry());
    BlockQueue.push_back(&Cfg->getEntry());
    // Mark all case blocks reachable to avoid problems with switching on
    // constants, covered enums, etc.
    // These blocks can contain fall-through annotations, and we don't want to
    // issue a warn_fallthrough_attr_unreachable for them.
    for (const auto *B : *Cfg) {
      const Stmt *L = B->getLabel();
      if (isa_and_nonnull<SwitchCase>(L) && ReachableBlocks.insert(B).second)
        BlockQueue.push_back(B);
    }

    // Standard BFS over successors from the seeded work list.
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      for (const CFGBlock *B : P->succs()) {
        if (B && ReachableBlocks.insert(B).second)
          BlockQueue.push_back(B);
      }
    }
  }

  /// Inspect every predecessor of case block \p B and classify it as an
  /// annotated fallthrough (counted in \p AnnotatedCnt), an unannotated
  /// fallthrough, or a benign edge (switch dispatch, empty preceding label).
  ///
  /// \returns true if at least one unannotated fallthrough into \p B exists.
  bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
                                 bool IsTemplateInstantiation) {
    assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

    int UnannotatedCnt = 0;
    AnnotatedCnt = 0;

    std::deque<const CFGBlock *> BlockQueue(B.pred_begin(), B.pred_end());
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      if (!P)
        continue;

      const Stmt *Term = P->getTerminatorStmt();
      if (isa_and_nonnull<SwitchStmt>(Term))
        continue; // Switch statement, good.

      const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
      if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Previous case label has no statements, good.

      const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
      if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Case label is preceded with a normal label, good.

      if (!ReachableBlocks.count(P)) {
        // Unreachable predecessor: scan it backwards for an annotation, which
        // is then dead (but still consumed so it isn't reported as misplaced).
        for (const CFGElement &Elem : llvm::reverse(*P)) {
          if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>()) {
            if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
              // Don't issue a warning for an unreachable fallthrough
              // attribute in template instantiations as it may not be
              // unreachable in all instantiations of the template.
              if (!IsTemplateInstantiation)
                S.Diag(AS->getBeginLoc(),
                       diag::warn_unreachable_fallthrough_attr);
              markFallthroughVisited(AS);
              ++AnnotatedCnt;
              break;
            }
            // Don't care about other unreachable statements.
          }
        }
          // If there are no unreachable statements, this may be a special
          // case in CFG:
          // case X: {
          //    A a;  // A has a destructor.
          //    break;
          // }
          // // <<<< This place is represented by a 'hanging' CFG block.
          // case Y:
          continue;
      }

        const Stmt *LastStmt = getLastStmt(*P);
        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
          markFallthroughVisited(AS);
          ++AnnotatedCnt;
          continue; // Fallthrough annotation, good.
        }

        if (!LastStmt) { // This block contains no executable statements.
          // Traverse its predecessors.
          std::copy(P->pred_begin(), P->pred_end(),
                    std::back_inserter(BlockQueue));
          continue;
        }

        ++UnannotatedCnt;
    }
    return !!UnannotatedCnt;
  }

  // Record every fallthrough annotation encountered during traversal.
  bool VisitAttributedStmt(AttributedStmt *S) override {
    if (asFallThroughAttr(S))
      FallthroughStmts.insert(S);
    return true;
  }

  bool VisitSwitchStmt(SwitchStmt *S) override {
    FoundSwitchStatements = true;
    return true;
  }

    // We don't want to traverse local type declarations. We analyze their
    // methods separately.
    bool TraverseDecl(Decl *D) override { return true; }

    // We analyze lambda bodies separately. Skip them here.
    bool TraverseLambdaExpr(LambdaExpr *LE) override {
      // Traverse the captures, but not the body.
      for (const auto C : zip(LE->captures(), LE->capture_inits()))
        TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
      return true;
    }

  private:

    // Return S as an AttributedStmt if it carries a FallThroughAttr,
    // otherwise null. Tolerates a null S.
    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
          return AS;
      }
      return nullptr;
    }

    // Last executable statement of a block: its terminator if any, else the
    // last CFGStmt element, else (workaround below) the label's sub-statement.
    static const Stmt *getLastStmt(const CFGBlock &B) {
      if (const Stmt *Term = B.getTerminatorStmt())
        return Term;
      for (const CFGElement &Elem : llvm::reverse(B))
        if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>())
          return CS->getStmt();
      // Workaround to detect a statement thrown out by CFGBuilder:
      //   case X: {} case Y:
      //   case X: ; case Y:
      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
        if (!isa<SwitchCase>(SW->getSubStmt()))
          return SW->getSubStmt();

      return nullptr;
    }

    bool FoundSwitchStatements;             // Saw any SwitchStmt in the body.
    AttrStmts FallthroughStmts;             // Annotations not yet matched.
    Sema &S;
    llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
};
} // anonymous namespace
1239 
1240 static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
1241                                             SourceLocation Loc) {
1242   TokenValue FallthroughTokens[] = {
1243     tok::l_square, tok::l_square,
1244     PP.getIdentifierInfo("fallthrough"),
1245     tok::r_square, tok::r_square
1246   };
1247 
1248   TokenValue ClangFallthroughTokens[] = {
1249     tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
1250     tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
1251     tok::r_square, tok::r_square
1252   };
1253 
1254   bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C23;
1255 
1256   StringRef MacroName;
1257   if (PreferClangAttr)
1258     MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1259   if (MacroName.empty())
1260     MacroName = PP.getLastMacroWithSpelling(Loc, FallthroughTokens);
1261   if (MacroName.empty() && !PreferClangAttr)
1262     MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1263   if (MacroName.empty()) {
1264     if (!PreferClangAttr)
1265       MacroName = "[[fallthrough]]";
1266     else if (PP.getLangOpts().CPlusPlus)
1267       MacroName = "[[clang::fallthrough]]";
1268     else
1269       MacroName = "__attribute__((fallthrough))";
1270   }
1271   return MacroName;
1272 }
1273 
/// Emit -Wimplicit-fallthrough diagnostics for the body in \p AC: warn on
/// each case label that can be fallen into without an annotation (with
/// fix-its suggesting the annotation or a 'break'), and report annotations
/// that no fallthrough edge ever crosses as misplaced.
///
/// \param PerFunction When true, only functions that already contain a
///        fallthrough annotation are checked (the per-function variant of
///        the warning).
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  // No switch statements at all: nothing to diagnose.
  if (!FM.foundSwitchStatements())
    return;

  // Per-function mode requires at least one annotation in the body.
  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (const CFGBlock *B : llvm::reverse(*Cfg)) {
    const Stmt *Label = B->getLabel();

    // Only blocks headed by a case/default label are of interest.
    if (!isa_and_nonnull<SwitchCase>(Label))
      continue;

    int AnnotatedCnt;

    bool IsTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(AC.getDecl()))
      IsTemplateInstantiation = Function->isTemplateInstantiation();
    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt,
                                      IsTemplateInstantiation))
      continue;

    S.Diag(Label->getBeginLoc(),
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
                       : diag::warn_unannotated_fallthrough);

    // Only suggest fix-its when no predecessor was annotated and the label
    // is spelled outside a macro (macro text can't be edited safely).
    if (!AnnotatedCnt) {
      SourceLocation L = Label->getBeginLoc();
      if (L.isMacroID())
        continue;

      const Stmt *Term = B->getTerminatorStmt();
      // Skip empty cases.
      while (B->empty() && !Term && B->succ_size() == 1) {
        B = *B->succ_begin();
        Term = B->getTerminatorStmt();
      }
      // Suggest the annotation unless the case body is just a 'break'.
      if (!(B->empty() && isa_and_nonnull<BreakStmt>(Term))) {
        Preprocessor &PP = S.getPreprocessor();
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, L);
        SmallString<64> TextToInsert(AnnotationSpelling);
        TextToInsert += "; ";
        S.Diag(L, diag::note_insert_fallthrough_fixit)
            << AnnotationSpelling
            << FixItHint::CreateInsertion(L, TextToInsert);
      }
      S.Diag(L, diag::note_insert_break_fixit)
          << FixItHint::CreateInsertion(L, "break; ");
    }
  }

  // Annotations never consumed by a fallthrough edge are misplaced.
  for (const auto *F : FM.getFallthroughStmts())
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
}
1339 
1340 static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
1341                      const Stmt *S) {
1342   assert(S);
1343 
1344   do {
1345     switch (S->getStmtClass()) {
1346     case Stmt::ForStmtClass:
1347     case Stmt::WhileStmtClass:
1348     case Stmt::CXXForRangeStmtClass:
1349     case Stmt::ObjCForCollectionStmtClass:
1350       return true;
1351     case Stmt::DoStmtClass: {
1352       Expr::EvalResult Result;
1353       if (!cast<DoStmt>(S)->getCond()->EvaluateAsInt(Result, Ctx))
1354         return true;
1355       return Result.Val.getInt().getBoolValue();
1356     }
1357     default:
1358       break;
1359     }
1360   } while ((S = PM.getParent(S)));
1361 
1362   return false;
1363 }
1364 
/// Diagnose repeated reads of ARC __weak objects within one function body.
/// A weak object can be zeroed between two reads, so repeated unsafe reads
/// (or a single read inside a loop) are flagged. The first unsafe read of
/// each object anchors the warning; all other accesses become notes.
static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
  StmtUsesPair;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      WeakUseVector::const_iterator UI2 = UI;
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        // Sole unsafe read: only loops make a single read repeated.
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        // Skip local (non-parameter) variables read in a loop; they are
        // commonly reassigned within the loop body.
        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
            continue;
      }
    }

    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  SourceManager &SM = S.getSourceManager();
  llvm::sort(UsesByStmt,
             [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
               return SM.isBeforeInTranslationUnit(LHS.first->getBeginLoc(),
                                                   RHS.first->getBeginLoc());
             });

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (const auto &P : UsesByStmt) {
    const Stmt *FirstRead = P.first;
    const WeakObjectProfileTy &Key = P.second->first;
    const WeakUseVector &Uses = P.second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    const NamedDecl *KeyProp = Key.getProperty();
    if (isa<VarDecl>(KeyProp))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(KeyProp))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(KeyProp))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(KeyProp))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Do not warn about IBOutlet weak property receivers being set to null
    // since they are typically only used from the main thread.
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(KeyProp))
      if (Prop->hasAttr<IBOutletAttr>())
        continue;

    // Show the first time the object was read.
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
        << int(ObjectKind) << KeyProp << int(FunctionKind)
        << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
    for (const auto &Use : Uses) {
      if (Use.getUseExpr() == FirstRead)
        continue;
      S.Diag(Use.getUseExpr()->getBeginLoc(),
             diag::note_arc_weak_also_accessed_here)
          << Use.getUseExpr()->getSourceRange();
    }
  }
}
1520 
1521 namespace clang {
1522 namespace {
1523 typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1524 typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1525 typedef std::list<DelayedDiag> DiagList;
1526 
1527 struct SortDiagBySourceLocation {
1528   SourceManager &SM;
1529   SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1530 
1531   bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1532     // Although this call will be slow, this is only called when outputting
1533     // multiple warnings.
1534     return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
1535   }
1536 };
1537 } // anonymous namespace
1538 } // namespace clang
1539 
1540 namespace {
/// Buffers uninitialized-variable uses reported by the analysis and emits
/// the corresponding diagnostics (in deterministic order) when flushed —
/// which happens at the latest in the destructor.
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef SmallVector<UninitUse, 2> UsesVec;
  // The bool bit records whether the variable has an idiomatic
  // self-initialization ('int x = x;').
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
  // Prefer using MapVector to DenseMap, so that iteration order will be
  // the same as insertion order. This is needed to obtain a deterministic
  // order of diagnostics when calling flushDiagnostics().
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
  UsesMap uses;
  UsesMap constRefUses;

public:
  UninitValsDiagReporter(Sema &S) : S(S) {}
  ~UninitValsDiagReporter() override { flushDiagnostics(); }

  // Fetch (or lazily create) the uses entry for vd in the given map.
  // The vector is heap-allocated and released in flushDiagnostics().
  MappedType &getUses(UsesMap &um, const VarDecl *vd) {
    MappedType &V = um[vd];
    if (!V.getPointer())
      V.setPointer(new UsesVec());
    return V;
  }

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    getUses(uses, vd).getPointer()->push_back(use);
  }

  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
                                         const UninitUse &use) override {
    getUses(constRefUses, vd).getPointer()->push_back(use);
  }

  // Mark the variable as having a self-init in both maps.
  void handleSelfInit(const VarDecl *vd) override {
    getUses(uses, vd).setInt(true);
    getUses(constRefUses, vd).setInt(true);
  }

  // Emit all buffered diagnostics and release the buffered use vectors.
  void flushDiagnostics() {
    for (const auto &P : uses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      // Specially handle the case where we have uses of an uninitialized
      // variable, but the root cause is an idiomatic self-init.  We want
      // to report the diagnostic at the self-init since that is the root cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        // Sort the uses by their SourceLocations.  While not strictly
        // guaranteed to produce them in line/column order, this will provide
        // a stable ordering.
        llvm::sort(*vec, [](const UninitUse &a, const UninitUse &b) {
          // Prefer a more confident report over a less confident one.
          if (a.getKind() != b.getKind())
            return a.getKind() > b.getKind();
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
        });

        for (const auto &U : *vec) {
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
          UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;

          if (DiagnoseUninitializedUse(S, vd, Use))
            // Skip further diagnostics for this variable. We try to warn only
            // on the first point at which a variable is used uninitialized.
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    uses.clear();

    // Flush all const reference uses diags.
    for (const auto &P : constRefUses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        for (const auto &U : *vec) {
          if (DiagnoseUninitializedConstRefUse(S, vd, U))
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    constRefUses.clear();
  }

private:
  // True if any buffered use is of a kind known to definitely occur on some
  // path (not merely 'maybe').
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
    return llvm::any_of(*vec, [](const UninitUse &U) {
      return U.getKind() == UninitUse::Always ||
             U.getKind() == UninitUse::AfterCall ||
             U.getKind() == UninitUse::AfterDecl;
    });
  }
};
1658 
1659 /// Inter-procedural data for the called-once checker.
1660 class CalledOnceInterProceduralData {
1661 public:
1662   // Add the delayed warning for the given block.
1663   void addDelayedWarning(const BlockDecl *Block,
1664                          PartialDiagnosticAt &&Warning) {
1665     DelayedBlockWarnings[Block].emplace_back(std::move(Warning));
1666   }
1667   // Report all of the warnings we've gathered for the given block.
1668   void flushWarnings(const BlockDecl *Block, Sema &S) {
1669     for (const PartialDiagnosticAt &Delayed : DelayedBlockWarnings[Block])
1670       S.Diag(Delayed.first, Delayed.second);
1671 
1672     discardWarnings(Block);
1673   }
1674   // Discard all of the warnings we've gathered for the given block.
1675   void discardWarnings(const BlockDecl *Block) {
1676     DelayedBlockWarnings.erase(Block);
1677   }
1678 
1679 private:
1680   using DelayedDiagnostics = SmallVector<PartialDiagnosticAt, 2>;
1681   llvm::DenseMap<const BlockDecl *, DelayedDiagnostics> DelayedBlockWarnings;
1682 };
1683 
1684 class CalledOnceCheckReporter : public CalledOnceCheckHandler {
1685 public:
1686   CalledOnceCheckReporter(Sema &S, CalledOnceInterProceduralData &Data)
1687       : S(S), Data(Data) {}
1688   void handleDoubleCall(const ParmVarDecl *Parameter, const Expr *Call,
1689                         const Expr *PrevCall, bool IsCompletionHandler,
1690                         bool Poised) override {
1691     auto DiagToReport = IsCompletionHandler
1692                             ? diag::warn_completion_handler_called_twice
1693                             : diag::warn_called_once_gets_called_twice;
1694     S.Diag(Call->getBeginLoc(), DiagToReport) << Parameter;
1695     S.Diag(PrevCall->getBeginLoc(), diag::note_called_once_gets_called_twice)
1696         << Poised;
1697   }
1698 
1699   void handleNeverCalled(const ParmVarDecl *Parameter,
1700                          bool IsCompletionHandler) override {
1701     auto DiagToReport = IsCompletionHandler
1702                             ? diag::warn_completion_handler_never_called
1703                             : diag::warn_called_once_never_called;
1704     S.Diag(Parameter->getBeginLoc(), DiagToReport)
1705         << Parameter << /* Captured */ false;
1706   }
1707 
1708   void handleNeverCalled(const ParmVarDecl *Parameter, const Decl *Function,
1709                          const Stmt *Where, NeverCalledReason Reason,
1710                          bool IsCalledDirectly,
1711                          bool IsCompletionHandler) override {
1712     auto DiagToReport = IsCompletionHandler
1713                             ? diag::warn_completion_handler_never_called_when
1714                             : diag::warn_called_once_never_called_when;
1715     PartialDiagnosticAt Warning(Where->getBeginLoc(), S.PDiag(DiagToReport)
1716                                                           << Parameter
1717                                                           << IsCalledDirectly
1718                                                           << (unsigned)Reason);
1719 
1720     if (const auto *Block = dyn_cast<BlockDecl>(Function)) {
1721       // We shouldn't report these warnings on blocks immediately
1722       Data.addDelayedWarning(Block, std::move(Warning));
1723     } else {
1724       S.Diag(Warning.first, Warning.second);
1725     }
1726   }
1727 
1728   void handleCapturedNeverCalled(const ParmVarDecl *Parameter,
1729                                  const Decl *Where,
1730                                  bool IsCompletionHandler) override {
1731     auto DiagToReport = IsCompletionHandler
1732                             ? diag::warn_completion_handler_never_called
1733                             : diag::warn_called_once_never_called;
1734     S.Diag(Where->getBeginLoc(), DiagToReport)
1735         << Parameter << /* Captured */ true;
1736   }
1737 
1738   void
1739   handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl *Block) override {
1740     Data.flushWarnings(Block, S);
1741   }
1742 
1743   void handleBlockWithNoGuarantees(const BlockDecl *Block) override {
1744     Data.discardWarnings(Block);
1745   }
1746 
1747 private:
1748   Sema &S;
1749   CalledOnceInterProceduralData &Data;
1750 };
1751 
// Diagnostic IDs grouped by convention: if every ID in a group is ignored
// at a given location, the corresponding analysis can be skipped entirely
// (see shouldAnalyzeCalledOnceImpl below).
constexpr unsigned CalledOnceWarnings[] = {
    diag::warn_called_once_never_called,
    diag::warn_called_once_never_called_when,
    diag::warn_called_once_gets_called_twice};

// The same warnings in their completion-handler flavors.
constexpr unsigned CompletionHandlerWarnings[]{
    diag::warn_completion_handler_never_called,
    diag::warn_completion_handler_never_called_when,
    diag::warn_completion_handler_called_twice};
1761 
1762 bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,
1763                                  const DiagnosticsEngine &Diags,
1764                                  SourceLocation At) {
1765   return llvm::any_of(DiagIDs, [&Diags, At](unsigned DiagID) {
1766     return !Diags.isIgnored(DiagID, At);
1767   });
1768 }
1769 
// True if any completion-handler-convention diagnostic is enabled at `At`.
bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine &Diags,
                                        SourceLocation At) {
  return shouldAnalyzeCalledOnceImpl(CompletionHandlerWarnings, Diags, At);
}
1774 
// True if any called-once diagnostic (explicit attribute or convention
// based) is enabled at `At`, i.e. whether the analysis should run at all.
bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine &Diags,
                                       SourceLocation At) {
  return shouldAnalyzeCalledOnceImpl(CalledOnceWarnings, Diags, At) ||
         shouldAnalyzeCalledOnceConventions(Diags, At);
}
1780 } // anonymous namespace
1781 
1782 //===----------------------------------------------------------------------===//
1783 // -Wthread-safety
1784 //===----------------------------------------------------------------------===//
1785 namespace clang {
1786 namespace threadSafety {
1787 namespace {
/// Buffers all -Wthread-safety diagnostics produced while analyzing a
/// function and emits them sorted by source location (emitDiagnostics()),
/// so output order is deterministic even though the analysis iterates
/// over unordered locksets.
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
  Sema &S;
  // Buffered (warning, notes) pairs, flushed by emitDiagnostics().
  DiagList Warnings;
  // Fallback anchors used when a handler receives an invalid location:
  // the start and end of the function being analyzed, respectively.
  SourceLocation FunLocation, FunEndLocation;

  // Set between enterFunction()/leaveFunction(); used for verbose notes.
  const FunctionDecl *CurrentFunction;
  bool Verbose;

  // Note list for a warning with no notes of its own.  In verbose mode a
  // note pointing at the current function's body is appended.
  OptionalNotes getNotes() const {
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      return OptionalNotes(1, FNote);
    }
    return OptionalNotes();
  }

  // As above, but starting from one existing note.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
    OptionalNotes ONS(1, Note);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  // As above, but starting from two existing notes.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
                         const PartialDiagnosticAt &Note2) const {
    OptionalNotes ONS;
    ONS.push_back(Note1);
    ONS.push_back(Note2);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  // "Acquired here" note, when the acquisition site is known.
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
    return LocLocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
               : getNotes();
  }

  // "Released here" note, when the release site is known.
  OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
                                     StringRef Kind) {
    return LocUnlocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocUnlocked, S.PDiag(diag::note_unlocked_here) << Kind))
               : getNotes();
  }

  // Note pointing at the parameter declaration involved in a managed
  // underlying-mutex mismatch, when its location is known.
  OptionalNotes makeManagedMismatchNoteForParam(SourceLocation DeclLoc) {
    return DeclLoc.isValid()
               ? getNotes(PartialDiagnosticAt(
                     DeclLoc,
                     S.PDiag(diag::note_managed_mismatch_here_for_param)))
               : getNotes();
  }

 public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
    : S(S), FunLocation(FL), FunEndLocation(FEL),
      CurrentFunction(nullptr), Verbose(false) {}

  void setVerbose(bool b) { Verbose = b; }

  /// Emit all buffered diagnostics in order of sourcelocation.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Diag.first.first, Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Note.first, Note.second);
    }
  }

  void handleUnmatchedUnderlyingMutexes(SourceLocation Loc, SourceLocation DLoc,
                                        Name scopeName, StringRef Kind,
                                        Name expected, Name actual) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_unmatched_underlying_mutexes)
                                    << Kind << scopeName << expected << actual);
    Warnings.emplace_back(std::move(Warning),
                          makeManagedMismatchNoteForParam(DLoc));
  }

  void handleExpectMoreUnderlyingMutexes(SourceLocation Loc,
                                         SourceLocation DLoc, Name scopeName,
                                         StringRef Kind,
                                         Name expected) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_expect_more_underlying_mutexes)
                 << Kind << scopeName << expected);
    Warnings.emplace_back(std::move(Warning),
                          makeManagedMismatchNoteForParam(DLoc));
  }

  void handleExpectFewerUnderlyingMutexes(SourceLocation Loc,
                                          SourceLocation DLoc, Name scopeName,
                                          StringRef Kind,
                                          Name actual) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_expect_fewer_underlying_mutexes)
                 << Kind << scopeName << actual);
    Warnings.emplace_back(std::move(Warning),
                          makeManagedMismatchNoteForParam(DLoc));
  }

  // The analysis could not resolve the capability expression at `Loc`.
  void handleInvalidLockExp(SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
                                         << Loc);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Unlock of a capability that is not held; falls back to the function
  // location when the unlock site is unknown.
  void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
                             SourceLocation LocPreviousUnlock) override {
    if (Loc.isInvalid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
                                         << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeUnlockedHereNote(LocPreviousUnlock, Kind));
  }

  // Unlock kind (e.g. shared vs. exclusive) doesn't match how the
  // capability was acquired.
  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
                                 LockKind Expected, LockKind Received,
                                 SourceLocation LocLocked,
                                 SourceLocation LocUnlock) override {
    if (LocUnlock.isInvalid())
      LocUnlock = FunLocation;
    PartialDiagnosticAt Warning(
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
                       << Kind << LockName << Received << Expected);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
                        SourceLocation LocDoubleLock) override {
    if (LocDoubleLock.isInvalid())
      LocDoubleLock = FunLocation;
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
                                                   << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  // A capability is (or is not) held at the end of a scope when it should
  // not (or should) be; the exact diagnostic depends on `LEK`.
  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
                                 SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK) override {
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_some_predecessors;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
      case LEK_NotLockedAtEndOfFunction:
        DiagID = diag::warn_expecting_locked;
        break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
                                                               << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  // The same capability is held both exclusively and shared; note both
  // acquisition sites.
  void handleExclusiveAndShared(StringRef Kind, Name LockName,
                                SourceLocation Loc1,
                                SourceLocation Loc2) override {
    PartialDiagnosticAt Warning(Loc1,
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
                                    << Kind << LockName);
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
                                       << Kind << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
  }

  // Access to a guarded variable with no capability held at all.
  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) override {
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
           "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess?
                        diag::warn_variable_requires_any_lock:
                        diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
      << D << getLockKindFromAccessKind(AK));
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // The required capability is not held.  If the analysis found a
  // near-match capability (`PossibleMatch`), the "precise" diagnostic
  // flavors are used and a near-match note is attached; in verbose mode a
  // variable access additionally notes where the guarded-by was declared.
  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
                          ProtectedOperationKind POK, Name LockName,
                          LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) override {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock_precise;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock_precise;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock_precise;
          break;
        case POK_PassByRef:
          DiagID = diag::warn_guarded_pass_by_reference;
          break;
        case POK_PtPassByRef:
          DiagID = diag::warn_pt_guarded_pass_by_reference;
          break;
        case POK_ReturnByRef:
          DiagID = diag::warn_guarded_return_by_reference;
          break;
        case POK_PtReturnByRef:
          DiagID = diag::warn_pt_guarded_return_by_reference;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
                                        << *PossibleMatch);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt VNote(D->getLocation(),
                                  S.PDiag(diag::note_guarded_by_declared_here)
                                      << D->getDeclName());
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
    } else {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock;
          break;
        case POK_PassByRef:
          DiagID = diag::warn_guarded_pass_by_reference;
          break;
        case POK_PtPassByRef:
          DiagID = diag::warn_pt_guarded_pass_by_reference;
          break;
        case POK_ReturnByRef:
          DiagID = diag::warn_guarded_return_by_reference;
          break;
        case POK_PtReturnByRef:
          DiagID = diag::warn_pt_guarded_return_by_reference;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt Note(D->getLocation(),
                                 S.PDiag(diag::note_guarded_by_declared_here));
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes());
    }
  }

  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
        S.PDiag(diag::warn_acquire_requires_negative_cap)
        << Kind << LockName << Neg);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_fun_requires_negative_cap) << D << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
                                         << Kind << FunName << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
                                SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void enterFunction(const FunctionDecl* FD) override {
    CurrentFunction = FD;
  }

  void leaveFunction(const FunctionDecl* FD) override {
    CurrentFunction = nullptr;
  }
};
2116 } // anonymous namespace
2117 } // namespace threadSafety
2118 } // namespace clang
2119 
2120 //===----------------------------------------------------------------------===//
2121 // -Wconsumed
2122 //===----------------------------------------------------------------------===//
2123 
2124 namespace clang {
2125 namespace consumed {
2126 namespace {
2127 class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
2128 
2129   Sema &S;
2130   DiagList Warnings;
2131 
2132 public:
2133 
2134   ConsumedWarningsHandler(Sema &S) : S(S) {}
2135 
2136   void emitDiagnostics() override {
2137     Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
2138     for (const auto &Diag : Warnings) {
2139       S.Diag(Diag.first.first, Diag.first.second);
2140       for (const auto &Note : Diag.second)
2141         S.Diag(Note.first, Note.second);
2142     }
2143   }
2144 
2145   void warnLoopStateMismatch(SourceLocation Loc,
2146                              StringRef VariableName) override {
2147     PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
2148       VariableName);
2149 
2150     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2151   }
2152 
2153   void warnParamReturnTypestateMismatch(SourceLocation Loc,
2154                                         StringRef VariableName,
2155                                         StringRef ExpectedState,
2156                                         StringRef ObservedState) override {
2157 
2158     PartialDiagnosticAt Warning(Loc, S.PDiag(
2159       diag::warn_param_return_typestate_mismatch) << VariableName <<
2160         ExpectedState << ObservedState);
2161 
2162     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2163   }
2164 
2165   void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2166                                   StringRef ObservedState) override {
2167 
2168     PartialDiagnosticAt Warning(Loc, S.PDiag(
2169       diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
2170 
2171     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2172   }
2173 
2174   void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
2175                                               StringRef TypeName) override {
2176     PartialDiagnosticAt Warning(Loc, S.PDiag(
2177       diag::warn_return_typestate_for_unconsumable_type) << TypeName);
2178 
2179     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2180   }
2181 
2182   void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2183                                    StringRef ObservedState) override {
2184 
2185     PartialDiagnosticAt Warning(Loc, S.PDiag(
2186       diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
2187 
2188     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2189   }
2190 
2191   void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
2192                                    SourceLocation Loc) override {
2193 
2194     PartialDiagnosticAt Warning(Loc, S.PDiag(
2195       diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
2196 
2197     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2198   }
2199 
2200   void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
2201                              StringRef State, SourceLocation Loc) override {
2202 
2203     PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
2204                                 MethodName << VariableName << State);
2205 
2206     Warnings.emplace_back(std::move(Warning), OptionalNotes());
2207   }
2208 };
2209 } // anonymous namespace
2210 } // namespace consumed
2211 } // namespace clang
2212 
2213 //===----------------------------------------------------------------------===//
2214 // Unsafe buffer usage analysis.
2215 //===----------------------------------------------------------------------===//
2216 
2217 namespace {
2218 class UnsafeBufferUsageReporter : public UnsafeBufferUsageHandler {
2219   Sema &S;
2220   bool SuggestSuggestions;  // Recommend -fsafe-buffer-usage-suggestions?
2221 
2222   // Lists as a string the names of variables in `VarGroupForVD` except for `VD`
2223   // itself:
2224   std::string listVariableGroupAsString(
2225       const VarDecl *VD, const ArrayRef<const VarDecl *> &VarGroupForVD) const {
2226     if (VarGroupForVD.size() <= 1)
2227       return "";
2228 
2229     std::vector<StringRef> VarNames;
2230     auto PutInQuotes = [](StringRef S) -> std::string {
2231       return "'" + S.str() + "'";
2232     };
2233 
2234     for (auto *V : VarGroupForVD) {
2235       if (V == VD)
2236         continue;
2237       VarNames.push_back(V->getName());
2238     }
2239     if (VarNames.size() == 1) {
2240       return PutInQuotes(VarNames[0]);
2241     }
2242     if (VarNames.size() == 2) {
2243       return PutInQuotes(VarNames[0]) + " and " + PutInQuotes(VarNames[1]);
2244     }
2245     assert(VarGroupForVD.size() > 3);
2246     const unsigned N = VarNames.size() -
2247                        2; // need to print the last two names as "..., X, and Y"
2248     std::string AllVars = "";
2249 
2250     for (unsigned I = 0; I < N; ++I)
2251       AllVars.append(PutInQuotes(VarNames[I]) + ", ");
2252     AllVars.append(PutInQuotes(VarNames[N]) + ", and " +
2253                    PutInQuotes(VarNames[N + 1]));
2254     return AllVars;
2255   }
2256 
public:
  // `SuggestSuggestions` is true when, after a warning, we should emit a
  // note recommending -fsafe-buffer-usage-suggestions (see
  // handleUnsafeOperation).
  UnsafeBufferUsageReporter(Sema &S, bool SuggestSuggestions)
    : S(S), SuggestSuggestions(SuggestSuggestions) {}
2260 
2261   void handleUnsafeOperation(const Stmt *Operation, bool IsRelatedToDecl,
2262                              ASTContext &Ctx) override {
2263     SourceLocation Loc;
2264     SourceRange Range;
2265     unsigned MsgParam = 0;
2266     NamedDecl *D = nullptr;
2267     if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Operation)) {
2268       Loc = ASE->getBase()->getExprLoc();
2269       Range = ASE->getBase()->getSourceRange();
2270       MsgParam = 2;
2271     } else if (const auto *BO = dyn_cast<BinaryOperator>(Operation)) {
2272       BinaryOperator::Opcode Op = BO->getOpcode();
2273       if (Op == BO_Add || Op == BO_AddAssign || Op == BO_Sub ||
2274           Op == BO_SubAssign) {
2275         if (BO->getRHS()->getType()->isIntegerType()) {
2276           Loc = BO->getLHS()->getExprLoc();
2277           Range = BO->getLHS()->getSourceRange();
2278         } else {
2279           Loc = BO->getRHS()->getExprLoc();
2280           Range = BO->getRHS()->getSourceRange();
2281         }
2282         MsgParam = 1;
2283       }
2284     } else if (const auto *UO = dyn_cast<UnaryOperator>(Operation)) {
2285       UnaryOperator::Opcode Op = UO->getOpcode();
2286       if (Op == UO_PreInc || Op == UO_PreDec || Op == UO_PostInc ||
2287           Op == UO_PostDec) {
2288         Loc = UO->getSubExpr()->getExprLoc();
2289         Range = UO->getSubExpr()->getSourceRange();
2290         MsgParam = 1;
2291       }
2292     } else {
2293       if (isa<CallExpr>(Operation) || isa<CXXConstructExpr>(Operation)) {
2294         // note_unsafe_buffer_operation doesn't have this mode yet.
2295         assert(!IsRelatedToDecl && "Not implemented yet!");
2296         MsgParam = 3;
2297       } else if (isa<MemberExpr>(Operation)) {
2298         // note_unsafe_buffer_operation doesn't have this mode yet.
2299         assert(!IsRelatedToDecl && "Not implemented yet!");
2300         auto *ME = cast<MemberExpr>(Operation);
2301         D = ME->getMemberDecl();
2302         MsgParam = 5;
2303       } else if (const auto *ECE = dyn_cast<ExplicitCastExpr>(Operation)) {
2304         QualType destType = ECE->getType();
2305         bool destTypeComplete = true;
2306 
2307         if (!isa<PointerType>(destType))
2308           return;
2309         destType = destType.getTypePtr()->getPointeeType();
2310         if (const auto *D = destType->getAsTagDecl())
2311           destTypeComplete = D->isCompleteDefinition();
2312 
2313         // If destination type is incomplete, it is unsafe to cast to anyway, no
2314         // need to check its type:
2315         if (destTypeComplete) {
2316           const uint64_t dSize = Ctx.getTypeSize(destType);
2317           QualType srcType = ECE->getSubExpr()->getType();
2318 
2319           assert(srcType->isPointerType());
2320 
2321           const uint64_t sSize =
2322               Ctx.getTypeSize(srcType.getTypePtr()->getPointeeType());
2323 
2324           if (sSize >= dSize)
2325             return;
2326         }
2327         if (const auto *CE = dyn_cast<CXXMemberCallExpr>(
2328                 ECE->getSubExpr()->IgnoreParens())) {
2329           D = CE->getMethodDecl();
2330         }
2331 
2332         if (!D)
2333           return;
2334 
2335         MsgParam = 4;
2336       }
2337       Loc = Operation->getBeginLoc();
2338       Range = Operation->getSourceRange();
2339     }
2340     if (IsRelatedToDecl) {
2341       assert(!SuggestSuggestions &&
2342              "Variables blamed for unsafe buffer usage without suggestions!");
2343       S.Diag(Loc, diag::note_unsafe_buffer_operation) << MsgParam << Range;
2344     } else {
2345       if (D) {
2346         S.Diag(Loc, diag::warn_unsafe_buffer_operation)
2347             << MsgParam << D << Range;
2348       } else {
2349         S.Diag(Loc, diag::warn_unsafe_buffer_operation) << MsgParam << Range;
2350       }
2351       if (SuggestSuggestions) {
2352         S.Diag(Loc, diag::note_safe_buffer_usage_suggestions_disabled);
2353       }
2354     }
2355   }
2356 
2357   void handleUnsafeLibcCall(const CallExpr *Call, unsigned PrintfInfo,
2358                             ASTContext &Ctx,
2359                             const Expr *UnsafeArg = nullptr) override {
2360     S.Diag(Call->getBeginLoc(), diag::warn_unsafe_buffer_libc_call)
2361         << Call->getDirectCallee() // We've checked there is a direct callee
2362         << Call->getSourceRange();
2363     if (PrintfInfo > 0) {
2364       SourceRange R =
2365           UnsafeArg ? UnsafeArg->getSourceRange() : Call->getSourceRange();
2366       S.Diag(R.getBegin(), diag::note_unsafe_buffer_printf_call)
2367           << PrintfInfo << R;
2368     }
2369   }
2370 
2371   void handleUnsafeOperationInContainer(const Stmt *Operation,
2372                                         bool IsRelatedToDecl,
2373                                         ASTContext &Ctx) override {
2374     SourceLocation Loc;
2375     SourceRange Range;
2376     unsigned MsgParam = 0;
2377 
2378     // This function only handles SpanTwoParamConstructorGadget so far, which
2379     // always gives a CXXConstructExpr.
2380     const auto *CtorExpr = cast<CXXConstructExpr>(Operation);
2381     Loc = CtorExpr->getLocation();
2382 
2383     S.Diag(Loc, diag::warn_unsafe_buffer_usage_in_container);
2384     if (IsRelatedToDecl) {
2385       assert(!SuggestSuggestions &&
2386              "Variables blamed for unsafe buffer usage without suggestions!");
2387       S.Diag(Loc, diag::note_unsafe_buffer_operation) << MsgParam << Range;
2388     }
2389   }
2390 
  // Warn that `Variable` is unsafe, and — when fix-its were generated — emit a
  // grouped note carrying the fix-its.  `D` is the enclosing declaration the
  // note names; `VarGrpMgr` supplies the set of variables that must be changed
  // together with `Variable`; `VarTargetTypes` records which target type
  // (std::span or std::array) the fix rewrites each variable to.
  void handleUnsafeVariableGroup(const VarDecl *Variable,
                                 const VariableGroupsManager &VarGrpMgr,
                                 FixItList &&Fixes, const Decl *D,
                                 const FixitStrategy &VarTargetTypes) override {
    assert(!SuggestSuggestions &&
           "Unsafe buffer usage fixits displayed without suggestions!");
    // Select message form 0 for pointers, 1 otherwise (e.g. arrays).
    S.Diag(Variable->getLocation(), diag::warn_unsafe_buffer_variable)
        << Variable << (Variable->getType()->isPointerType() ? 0 : 1)
        << Variable->getSourceRange();
    if (!Fixes.empty()) {
      assert(isa<NamedDecl>(D) &&
             "Fix-its are generated only for `NamedDecl`s");
      const NamedDecl *ND = cast<NamedDecl>(D);
      bool BriefMsg = false;
      // If the variable group involves parameters, the diagnostic message will
      // NOT explain how the variables are grouped as the reason is non-trivial
      // and irrelevant to users' experience:
      const auto VarGroupForVD = VarGrpMgr.getGroupOfVar(Variable, &BriefMsg);
      // Map the fix-it target type onto the diagnostic's %select index.
      unsigned FixItStrategy = 0;
      switch (VarTargetTypes.lookup(Variable)) {
      case clang::FixitStrategy::Kind::Span:
        FixItStrategy = 0;
        break;
      case clang::FixitStrategy::Kind::Array:
        FixItStrategy = 1;
        break;
      default:
        assert(false && "We support only std::span and std::array");
      };

      // Build the note and stream all arguments plus the fix-it hints into the
      // single DiagnosticBuilder so they attach to one diagnostic.
      const auto &FD =
          S.Diag(Variable->getLocation(),
                 BriefMsg ? diag::note_unsafe_buffer_variable_fixit_together
                          : diag::note_unsafe_buffer_variable_fixit_group);

      FD << Variable << FixItStrategy;
      FD << listVariableGroupAsString(Variable, VarGroupForVD)
         << (VarGroupForVD.size() > 1) << ND;
      for (const auto &F : Fixes) {
        FD << F;
      }
    }

#ifndef NDEBUG
    // In assert builds, surface any analysis-internal debug notes recorded
    // for this variable (enabled by areDebugNotesRequested()).
    if (areDebugNotesRequested())
      for (const DebugNote &Note: DebugNotesByVar[Variable])
        S.Diag(Note.first, diag::note_safe_buffer_debug_mode) << Note.second;
#endif
  }
2440 
2441   bool isSafeBufferOptOut(const SourceLocation &Loc) const override {
2442     return S.PP.isSafeBufferOptOut(S.getSourceManager(), Loc);
2443   }
2444 
2445   bool ignoreUnsafeBufferInContainer(const SourceLocation &Loc) const override {
2446     return S.Diags.isIgnored(diag::warn_unsafe_buffer_usage_in_container, Loc);
2447   }
2448 
2449   bool ignoreUnsafeBufferInLibcCall(const SourceLocation &Loc) const override {
2450     return S.Diags.isIgnored(diag::warn_unsafe_buffer_libc_call, Loc);
2451   }
2452 
  // Returns the text representation of the clang::unsafe_buffer_usage
  // attribute: a user macro in scope at `Loc` that spells exactly
  // `[[clang::unsafe_buffer_usage]]` if one exists, otherwise that literal
  // spelling.  `WSSuffix` holds customized "white-space"s to append, e.g.,
  // newline or whitespace characters.
  std::string
  getUnsafeBufferUsageAttributeTextAt(SourceLocation Loc,
                                      StringRef WSSuffix = "") const override {
    Preprocessor &PP = S.getPreprocessor();
    // The exact token sequence a candidate macro must expand to:
    // `[[clang::unsafe_buffer_usage]]`.
    TokenValue ClangUnsafeBufferUsageTokens[] = {
        tok::l_square,
        tok::l_square,
        PP.getIdentifierInfo("clang"),
        tok::coloncolon,
        PP.getIdentifierInfo("unsafe_buffer_usage"),
        tok::r_square,
        tok::r_square};

    StringRef MacroName;

    // The macro returned here is guaranteed not to be function-like:
    MacroName = PP.getLastMacroWithSpelling(Loc, ClangUnsafeBufferUsageTokens);
    if (MacroName.empty())
      MacroName = "[[clang::unsafe_buffer_usage]]";
    return MacroName.str() + WSSuffix.str();
  }
2477 };
2478 } // namespace
2479 
2480 //===----------------------------------------------------------------------===//
2481 // AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
2482 //  warnings on a function, method, or block.
2483 //===----------------------------------------------------------------------===//
2484 
2485 sema::AnalysisBasedWarnings::Policy::Policy() {
2486   enableCheckFallThrough = 1;
2487   enableCheckUnreachable = 0;
2488   enableThreadSafetyAnalysis = 0;
2489   enableConsumedAnalysis = 0;
2490 }
2491 
/// InterProceduralData aims to be a storage of whatever data should be passed
/// between analyses of different functions.
///
/// At the moment, its primary goal is to make the information gathered during
/// the analysis of the blocks available during the analysis of the enclosing
/// function.  This is important due to the fact that blocks are analyzed before
/// the enclosed function is even parsed fully, so it is not viable to access
/// anything in the outer scope while analyzing the block.  On the other hand,
/// re-building CFG for blocks and re-analyzing them when we do have all the
/// information (i.e. during the analysis of the enclosing function) seems to be
/// ill-designed.
class sema::AnalysisBasedWarnings::InterProceduralData {
public:
  // It is important to analyze blocks within functions because it's a very
  // common pattern to capture completion handler parameters by blocks.
  // Accumulated state of the "called once" parameter check across functions
  // and the blocks nested inside them.
  CalledOnceInterProceduralData CalledOnceData;
};
2509 
2510 static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
2511   return (unsigned)!D.isIgnored(diag, SourceLocation());
2512 }
2513 
// Seed the default per-function analysis policy from which diagnostics are
// currently enabled, so later runs can skip analyses nobody asked for.  All
// statistics counters start at zero.
sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
    : S(s), IPData(std::make_unique<InterProceduralData>()),
      NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
      MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
      NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
      NumUninitAnalysisBlockVisits(0),
      MaxUninitAnalysisBlockVisitsPerFunction(0) {

  using namespace diag;
  DiagnosticsEngine &D = S.getDiagnostics();

  // Unreachable-code checking runs if any of its diagnostics is enabled.
  DefaultPolicy.enableCheckUnreachable =
      isEnabled(D, warn_unreachable) || isEnabled(D, warn_unreachable_break) ||
      isEnabled(D, warn_unreachable_return) ||
      isEnabled(D, warn_unreachable_loop_increment);

  DefaultPolicy.enableThreadSafetyAnalysis = isEnabled(D, warn_double_lock);

  DefaultPolicy.enableConsumedAnalysis =
      isEnabled(D, warn_use_in_invalid_state);
}
2535 
// Out-of-line destructor: InterProceduralData is only forward-declared in the
// header, so unique_ptr's deleter must be instantiated here, where the class
// definition is complete.
sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;
2538 
2539 static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
2540   for (const auto &D : fscope->PossiblyUnreachableDiags)
2541     S.Diag(D.Loc, D.PD);
2542 }
2543 
2544 // An AST Visitor that calls a callback function on each callable DEFINITION
2545 // that is NOT in a dependent context:
2546 class CallableVisitor : public DynamicRecursiveASTVisitor {
2547 private:
2548   llvm::function_ref<void(const Decl *)> Callback;
2549 
2550 public:
2551   CallableVisitor(llvm::function_ref<void(const Decl *)> Callback)
2552       : Callback(Callback) {
2553     ShouldVisitTemplateInstantiations = true;
2554     ShouldVisitImplicitCode = false;
2555   }
2556 
2557   bool VisitFunctionDecl(FunctionDecl *Node) override {
2558     if (cast<DeclContext>(Node)->isDependentContext())
2559       return true; // Not to analyze dependent decl
2560     // `FunctionDecl->hasBody()` returns true if the function has a body
2561     // somewhere defined.  But we want to know if this `Node` has a body
2562     // child.  So we use `doesThisDeclarationHaveABody`:
2563     if (Node->doesThisDeclarationHaveABody())
2564       Callback(Node);
2565     return true;
2566   }
2567 
2568   bool VisitBlockDecl(BlockDecl *Node) override {
2569     if (cast<DeclContext>(Node)->isDependentContext())
2570       return true; // Not to analyze dependent decl
2571     Callback(Node);
2572     return true;
2573   }
2574 
2575   bool VisitObjCMethodDecl(ObjCMethodDecl *Node) override {
2576     if (cast<DeclContext>(Node)->isDependentContext())
2577       return true; // Not to analyze dependent decl
2578     if (Node->hasBody())
2579       Callback(Node);
2580     return true;
2581   }
2582 
2583   bool VisitLambdaExpr(LambdaExpr *Node) override {
2584     return VisitFunctionDecl(Node->getCallOperator());
2585   }
2586 };
2587 
2588 void clang::sema::AnalysisBasedWarnings::IssueWarnings(
2589      TranslationUnitDecl *TU) {
2590   if (!TU)
2591     return; // This is unexpected, give up quietly.
2592 
2593   DiagnosticsEngine &Diags = S.getDiagnostics();
2594 
2595   if (S.hasUncompilableErrorOccurred() || Diags.getIgnoreAllWarnings())
2596     // exit if having uncompilable errors or ignoring all warnings:
2597     return;
2598 
2599   DiagnosticOptions &DiagOpts = Diags.getDiagnosticOptions();
2600 
2601   // UnsafeBufferUsage analysis settings.
2602   bool UnsafeBufferUsageCanEmitSuggestions = S.getLangOpts().CPlusPlus20;
2603   bool UnsafeBufferUsageShouldEmitSuggestions =  // Should != Can.
2604       UnsafeBufferUsageCanEmitSuggestions &&
2605       DiagOpts.ShowSafeBufferUsageSuggestions;
2606   bool UnsafeBufferUsageShouldSuggestSuggestions =
2607       UnsafeBufferUsageCanEmitSuggestions &&
2608       !DiagOpts.ShowSafeBufferUsageSuggestions;
2609   UnsafeBufferUsageReporter R(S, UnsafeBufferUsageShouldSuggestSuggestions);
2610 
2611   // The Callback function that performs analyses:
2612   auto CallAnalyzers = [&](const Decl *Node) -> void {
2613     // Perform unsafe buffer usage analysis:
2614     if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation,
2615                          Node->getBeginLoc()) ||
2616         !Diags.isIgnored(diag::warn_unsafe_buffer_variable,
2617                          Node->getBeginLoc()) ||
2618         !Diags.isIgnored(diag::warn_unsafe_buffer_usage_in_container,
2619                          Node->getBeginLoc()) ||
2620         !Diags.isIgnored(diag::warn_unsafe_buffer_libc_call,
2621                          Node->getBeginLoc())) {
2622       clang::checkUnsafeBufferUsage(Node, R,
2623                                     UnsafeBufferUsageShouldEmitSuggestions);
2624     }
2625 
2626     // More analysis ...
2627   };
2628   // Emit per-function analysis-based warnings that require the whole-TU
2629   // reasoning. Check if any of them is enabled at all before scanning the AST:
2630   if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation, SourceLocation()) ||
2631       !Diags.isIgnored(diag::warn_unsafe_buffer_variable, SourceLocation()) ||
2632       !Diags.isIgnored(diag::warn_unsafe_buffer_usage_in_container,
2633                        SourceLocation()) ||
2634       (!Diags.isIgnored(diag::warn_unsafe_buffer_libc_call, SourceLocation()) &&
2635        S.getLangOpts().CPlusPlus /* only warn about libc calls in C++ */)) {
2636     CallableVisitor(CallAnalyzers).TraverseTranslationUnitDecl(TU);
2637   }
2638 }
2639 
/// Run the per-function analysis-based checks on the body of `D` — missing
/// return / fall-through, unreachable code, thread safety, consumed objects,
/// uninitialized variables, "called once" parameters, switch fall-through
/// annotations, repeated weak-object use, infinite recursion, and throw from
/// a noexcept function — as selected by policy `P` and the diagnostics that
/// are currently enabled.  `BlockType` is the type of the block when `D` is a
/// BlockDecl (used by the fall-through check).
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
    sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
    const Decl *D, QualType BlockType) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis if we are going to just ignore them.
  if (Diags.getIgnoreAllWarnings() ||
      (Diags.getSuppressSystemWarnings() &&
       S.SourceMgr.isInSystemHeader(D->getLocation())))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (S.hasUncompilableErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context with the specified CFG build options.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;

  // Force that certain expressions appear as CFGElements in the CFG.  This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring.  This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately.  This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
      P.enableConsumedAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  }
  else {
    AC.getCFGBuildOptions()
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
      .setAlwaysAdd(Stmt::BlockExprClass)
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
      .setAlwaysAdd(Stmt::DeclRefExprClass)
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
      .setAlwaysAdd(Stmt::UnaryOperatorClass);
  }

  // Install the logical handler.  It observes CFG construction, so it must be
  // set before the first AC.getCFG() call below.
  std::optional<LogicalErrorHandler> LEH;
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    LEH.emplace(S);
    AC.getCFGBuildOptions().Observer = &*LEH;
  }

  // Emit delayed diagnostics.  Each delayed diagnostic is only emitted if
  // every statement it depends on is reachable; when the CFG cannot be built,
  // all of them are emitted unconditionally.
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (const auto &D : fscope->PossiblyUnreachableDiags) {
      for (const Stmt *S : D.Stmts)
        AC.registerForcedBlockExpression(S);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (const auto &D : fscope->PossiblyUnreachableDiags) {
        bool AllReachable = true;
        for (const Stmt *S : D.Stmts) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
              AllReachable = false;
              break;
            }
          }
          // If we cannot map to a basic block, assume the statement is
          // reachable.
        }

        if (AllReachable)
          S.Diag(D.Loc, D.PD);
      }
    }

    if (!analyzed)
      flushDiagnostics(S, fscope);
  }

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    // Pick the diagnostics variant matching the kind of callable: block,
    // lambda call operator, coroutine, or plain function.
    const CheckFallThroughDiagnostics &CD =
        (isa<BlockDecl>(D)
             ? CheckFallThroughDiagnostics::MakeForBlock()
             : (isa<CXXMethodDecl>(D) &&
                cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
                cast<CXXMethodDecl>(D)->getParent()->isLambda())
                   ? CheckFallThroughDiagnostics::MakeForLambda()
                   : (fscope->isCoroutine()
                          ? CheckFallThroughDiagnostics::MakeForCoroutine(D)
                          : CheckFallThroughDiagnostics::MakeForFunction(D)));
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getEndLoc();
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
      Reporter.setIssueBetaWarnings(true);
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
      Reporter.setVerbose(true);

    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
                                          &S.ThreadSafetyDeclCache);
    Reporter.emitDiagnostics();
  }

  // Check for violations of consumed properties.
  if (P.enableConsumedAnalysis) {
    consumed::ConsumedWarningsHandler WarningHandler(S);
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
    Analyzer.run(AC);
  }

  // Uninitialized-variable analysis: run only when one of its diagnostics is
  // enabled, and collect statistics if requested.
  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_uninit_const_reference, D->getBeginLoc())) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  // Check for violations of "called once" parameter properties.
  if (S.getLangOpts().ObjC && !S.getLangOpts().CPlusPlus &&
      shouldAnalyzeCalledOnceParameters(Diags, D->getBeginLoc())) {
    if (AC.getCFG()) {
      CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
      checkCalledOnceParameters(
          AC, Reporter,
          shouldAnalyzeCalledOnceConventions(Diags, D->getBeginLoc()));
    }
  }

  // Diagnose unannotated fall-through between switch labels; also run when
  // the function contains a [[fallthrough]] statement, to validate it.
  bool FallThroughDiagFull =
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
  if (FallThroughDiagFull || FallThroughDiagPerFunction ||
      fscope->HasFallthroughStmt) {
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  }

  // Diagnose repeated use of __weak objects (ObjC ARC only).
  if (S.getLangOpts().ObjCWeak &&
      !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());


  // Check for infinite self-recursion in functions
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
                       D->getBeginLoc())) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      checkRecursiveFunction(S, FD, Body, AC);
    }
  }

  // Check for throw out of non-throwing function.
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
      if (S.getLangOpts().CPlusPlus && !fscope->isCoroutine() && isNoexcept(FD))
        checkThrowInNonThrowingFunc(S, FD, AC);

  // If none of the previous checks caused a CFG build, trigger one here
  // for the logical error handler.
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    AC.getCFG();
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}
2888 
2889 void clang::sema::AnalysisBasedWarnings::PrintStats() const {
2890   llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
2891 
2892   unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
2893   unsigned AvgCFGBlocksPerFunction =
2894       !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
2895   llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
2896                << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
2897                << "  " << NumCFGBlocks << " CFG blocks built.\n"
2898                << "  " << AvgCFGBlocksPerFunction
2899                << " average CFG blocks per function.\n"
2900                << "  " << MaxCFGBlocksPerFunction
2901                << " max CFG blocks per function.\n";
2902 
2903   unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
2904       : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
2905   unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
2906       : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
2907   llvm::errs() << NumUninitAnalysisFunctions
2908                << " functions analyzed for uninitialiazed variables\n"
2909                << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
2910                << "  " << AvgUninitVariablesPerFunction
2911                << " average variables per function.\n"
2912                << "  " << MaxUninitAnalysisVariablesPerFunction
2913                << " max variables per function.\n"
2914                << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
2915                << "  " << AvgUninitBlockVisitsPerFunction
2916                << " average block visits per function.\n"
2917                << "  " << MaxUninitAnalysisBlockVisitsPerFunction
2918                << " max block visits per function.\n";
2919 }
2920