1 //=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file defines analysis_warnings::[Policy,Executor].
10 // Together they are used by Sema to issue warnings based on inexpensive
11 // static analysis algorithms in libAnalysis.
12 //
13 //===----------------------------------------------------------------------===//
14
15 #include "clang/Sema/AnalysisBasedWarnings.h"
16 #include "clang/AST/DeclCXX.h"
17 #include "clang/AST/DeclObjC.h"
18 #include "clang/AST/EvaluatedExprVisitor.h"
19 #include "clang/AST/ExprCXX.h"
20 #include "clang/AST/ExprObjC.h"
21 #include "clang/AST/ParentMap.h"
22 #include "clang/AST/RecursiveASTVisitor.h"
23 #include "clang/AST/StmtCXX.h"
24 #include "clang/AST/StmtObjC.h"
25 #include "clang/AST/StmtVisitor.h"
26 #include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
27 #include "clang/Analysis/Analyses/CalledOnceCheck.h"
28 #include "clang/Analysis/Analyses/Consumed.h"
29 #include "clang/Analysis/Analyses/ReachableCode.h"
30 #include "clang/Analysis/Analyses/ThreadSafety.h"
31 #include "clang/Analysis/Analyses/UninitializedValues.h"
32 #include "clang/Analysis/AnalysisDeclContext.h"
33 #include "clang/Analysis/CFG.h"
34 #include "clang/Analysis/CFGStmtMap.h"
35 #include "clang/Basic/SourceLocation.h"
36 #include "clang/Basic/SourceManager.h"
37 #include "clang/Lex/Preprocessor.h"
38 #include "clang/Sema/ScopeInfo.h"
39 #include "clang/Sema/SemaInternal.h"
40 #include "llvm/ADT/ArrayRef.h"
41 #include "llvm/ADT/BitVector.h"
42 #include "llvm/ADT/MapVector.h"
43 #include "llvm/ADT/SmallString.h"
44 #include "llvm/ADT/SmallVector.h"
45 #include "llvm/ADT/StringRef.h"
46 #include "llvm/Support/Casting.h"
47 #include <algorithm>
48 #include <deque>
49 #include <iterator>
50
51 using namespace clang;
52
53 //===----------------------------------------------------------------------===//
54 // Unreachable code analysis.
55 //===----------------------------------------------------------------------===//
56
57 namespace {
58 class UnreachableCodeHandler : public reachable_code::Callback {
59 Sema &S;
60 SourceRange PreviousSilenceableCondVal;
61
62 public:
UnreachableCodeHandler(Sema & s)63 UnreachableCodeHandler(Sema &s) : S(s) {}
64
HandleUnreachable(reachable_code::UnreachableKind UK,SourceLocation L,SourceRange SilenceableCondVal,SourceRange R1,SourceRange R2)65 void HandleUnreachable(reachable_code::UnreachableKind UK,
66 SourceLocation L,
67 SourceRange SilenceableCondVal,
68 SourceRange R1,
69 SourceRange R2) override {
70 // Avoid reporting multiple unreachable code diagnostics that are
71 // triggered by the same conditional value.
72 if (PreviousSilenceableCondVal.isValid() &&
73 SilenceableCondVal.isValid() &&
74 PreviousSilenceableCondVal == SilenceableCondVal)
75 return;
76 PreviousSilenceableCondVal = SilenceableCondVal;
77
78 unsigned diag = diag::warn_unreachable;
79 switch (UK) {
80 case reachable_code::UK_Break:
81 diag = diag::warn_unreachable_break;
82 break;
83 case reachable_code::UK_Return:
84 diag = diag::warn_unreachable_return;
85 break;
86 case reachable_code::UK_Loop_Increment:
87 diag = diag::warn_unreachable_loop_increment;
88 break;
89 case reachable_code::UK_Other:
90 break;
91 }
92
93 S.Diag(L, diag) << R1 << R2;
94
95 SourceLocation Open = SilenceableCondVal.getBegin();
96 if (Open.isValid()) {
97 SourceLocation Close = SilenceableCondVal.getEnd();
98 Close = S.getLocForEndOfToken(Close);
99 if (Close.isValid()) {
100 S.Diag(Open, diag::note_unreachable_silence)
101 << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
102 << FixItHint::CreateInsertion(Close, ")");
103 }
104 }
105 }
106 };
107 } // anonymous namespace
108
109 /// CheckUnreachable - Check for unreachable code.
CheckUnreachable(Sema & S,AnalysisDeclContext & AC)110 static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
111 // As a heuristic prune all diagnostics not in the main file. Currently
112 // the majority of warnings in headers are false positives. These
113 // are largely caused by configuration state, e.g. preprocessor
114 // defined code, etc.
115 //
116 // Note that this is also a performance optimization. Analyzing
117 // headers many times can be expensive.
118 if (!S.getSourceManager().isInMainFile(AC.getDecl()->getBeginLoc()))
119 return;
120
121 UnreachableCodeHandler UC(S);
122 reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
123 }
124
125 namespace {
126 /// Warn on logical operator errors in CFGBuilder
127 class LogicalErrorHandler : public CFGCallback {
128 Sema &S;
129
130 public:
LogicalErrorHandler(Sema & S)131 LogicalErrorHandler(Sema &S) : CFGCallback(), S(S) {}
132
HasMacroID(const Expr * E)133 static bool HasMacroID(const Expr *E) {
134 if (E->getExprLoc().isMacroID())
135 return true;
136
137 // Recurse to children.
138 for (const Stmt *SubStmt : E->children())
139 if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
140 if (HasMacroID(SubExpr))
141 return true;
142
143 return false;
144 }
145
compareAlwaysTrue(const BinaryOperator * B,bool isAlwaysTrue)146 void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
147 if (HasMacroID(B))
148 return;
149
150 SourceRange DiagRange = B->getSourceRange();
151 S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
152 << DiagRange << isAlwaysTrue;
153 }
154
compareBitwiseEquality(const BinaryOperator * B,bool isAlwaysTrue)155 void compareBitwiseEquality(const BinaryOperator *B,
156 bool isAlwaysTrue) override {
157 if (HasMacroID(B))
158 return;
159
160 SourceRange DiagRange = B->getSourceRange();
161 S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
162 << DiagRange << isAlwaysTrue;
163 }
164
compareBitwiseOr(const BinaryOperator * B)165 void compareBitwiseOr(const BinaryOperator *B) override {
166 if (HasMacroID(B))
167 return;
168
169 SourceRange DiagRange = B->getSourceRange();
170 S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_or) << DiagRange;
171 }
172
hasActiveDiagnostics(DiagnosticsEngine & Diags,SourceLocation Loc)173 static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
174 SourceLocation Loc) {
175 return !Diags.isIgnored(diag::warn_tautological_overlap_comparison, Loc) ||
176 !Diags.isIgnored(diag::warn_comparison_bitwise_or, Loc);
177 }
178 };
179 } // anonymous namespace
180
181 //===----------------------------------------------------------------------===//
182 // Check for infinite self-recursion in functions
183 //===----------------------------------------------------------------------===//
184
185 // Returns true if the function is called anywhere within the CFGBlock.
186 // For member functions, the additional condition of being call from the
187 // this pointer is required.
hasRecursiveCallInPath(const FunctionDecl * FD,CFGBlock & Block)188 static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
189 // Process all the Stmt's in this block to find any calls to FD.
190 for (const auto &B : Block) {
191 if (B.getKind() != CFGElement::Statement)
192 continue;
193
194 const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
195 if (!CE || !CE->getCalleeDecl() ||
196 CE->getCalleeDecl()->getCanonicalDecl() != FD)
197 continue;
198
199 // Skip function calls which are qualified with a templated class.
200 if (const DeclRefExpr *DRE =
201 dyn_cast<DeclRefExpr>(CE->getCallee()->IgnoreParenImpCasts())) {
202 if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
203 if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
204 isa<TemplateSpecializationType>(NNS->getAsType())) {
205 continue;
206 }
207 }
208 }
209
210 const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE);
211 if (!MCE || isa<CXXThisExpr>(MCE->getImplicitObjectArgument()) ||
212 !MCE->getMethodDecl()->isVirtual())
213 return true;
214 }
215 return false;
216 }
217
218 // Returns true if every path from the entry block passes through a call to FD.
checkForRecursiveFunctionCall(const FunctionDecl * FD,CFG * cfg)219 static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
220 llvm::SmallPtrSet<CFGBlock *, 16> Visited;
221 llvm::SmallVector<CFGBlock *, 16> WorkList;
222 // Keep track of whether we found at least one recursive path.
223 bool foundRecursion = false;
224
225 const unsigned ExitID = cfg->getExit().getBlockID();
226
227 // Seed the work list with the entry block.
228 WorkList.push_back(&cfg->getEntry());
229
230 while (!WorkList.empty()) {
231 CFGBlock *Block = WorkList.pop_back_val();
232
233 for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
234 if (CFGBlock *SuccBlock = *I) {
235 if (!Visited.insert(SuccBlock).second)
236 continue;
237
238 // Found a path to the exit node without a recursive call.
239 if (ExitID == SuccBlock->getBlockID())
240 return false;
241
242 // If the successor block contains a recursive call, end analysis there.
243 if (hasRecursiveCallInPath(FD, *SuccBlock)) {
244 foundRecursion = true;
245 continue;
246 }
247
248 WorkList.push_back(SuccBlock);
249 }
250 }
251 }
252 return foundRecursion;
253 }
254
checkRecursiveFunction(Sema & S,const FunctionDecl * FD,const Stmt * Body,AnalysisDeclContext & AC)255 static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
256 const Stmt *Body, AnalysisDeclContext &AC) {
257 FD = FD->getCanonicalDecl();
258
259 // Only run on non-templated functions and non-templated members of
260 // templated classes.
261 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
262 FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
263 return;
264
265 CFG *cfg = AC.getCFG();
266 if (!cfg) return;
267
268 // If the exit block is unreachable, skip processing the function.
269 if (cfg->getExit().pred_empty())
270 return;
271
272 // Emit diagnostic if a recursive function call is detected for all paths.
273 if (checkForRecursiveFunctionCall(FD, cfg))
274 S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
275 }
276
277 //===----------------------------------------------------------------------===//
278 // Check for throw in a non-throwing function.
279 //===----------------------------------------------------------------------===//
280
/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
/// can reach ExitBlock.
///
/// Performs a depth-first search over successor blocks starting at
/// ThrowBlock. Reaching the CFG exit means the exception escapes the
/// function; hitting a catch handler that can catch E's type terminates
/// that unwind path.
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
                         CFG *Body) {
  // Worklist of blocks still to unwind through; Queued prevents revisits.
  SmallVector<CFGBlock *, 16> Stack;
  llvm::BitVector Queued(Body->getNumBlockIDs());

  Stack.push_back(&ThrowBlock);
  Queued[ThrowBlock.getBlockID()] = true;

  while (!Stack.empty()) {
    CFGBlock &UnwindBlock = *Stack.back();
    Stack.pop_back();

    for (auto &Succ : UnwindBlock.succs()) {
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
        continue;

      // Unwound all the way to the function exit: the exception escapes.
      if (Succ->getBlockID() == Body->getExit().getBlockID())
        return true;

      if (auto *Catch =
              dyn_cast_or_null<CXXCatchStmt>(Succ->getLabel())) {
        QualType Caught = Catch->getCaughtType();
        if (Caught.isNull() || // catch (...) catches everything
            !E->getSubExpr() || // throw; is considered caught by any handler
            S.handlerCanCatch(Caught, E->getSubExpr()->getType()))
          // Exception doesn't escape via this path.
          // NOTE(review): this `break` abandons the remaining successors of
          // UnwindBlock once a matching handler is seen -- confirm this is
          // the intended successor-ordering assumption.
          break;
      } else {
        // Not a handler: keep unwinding through this successor.
        Stack.push_back(Succ);
        Queued[Succ->getBlockID()] = true;
      }
    }
  }

  return false;
}
319
visitReachableThrows(CFG * BodyCFG,llvm::function_ref<void (const CXXThrowExpr *,CFGBlock &)> Visit)320 static void visitReachableThrows(
321 CFG *BodyCFG,
322 llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
323 llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
324 clang::reachable_code::ScanReachableFromBlock(&BodyCFG->getEntry(), Reachable);
325 for (CFGBlock *B : *BodyCFG) {
326 if (!Reachable[B->getBlockID()])
327 continue;
328 for (CFGElement &E : *B) {
329 Optional<CFGStmt> S = E.getAs<CFGStmt>();
330 if (!S)
331 continue;
332 if (auto *Throw = dyn_cast<CXXThrowExpr>(S->getStmt()))
333 Visit(Throw, *B);
334 }
335 }
336 }
337
EmitDiagForCXXThrowInNonThrowingFunc(Sema & S,SourceLocation OpLoc,const FunctionDecl * FD)338 static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
339 const FunctionDecl *FD) {
340 if (!S.getSourceManager().isInSystemHeader(OpLoc) &&
341 FD->getTypeSourceInfo()) {
342 S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
343 if (S.getLangOpts().CPlusPlus11 &&
344 (isa<CXXDestructorDecl>(FD) ||
345 FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
346 FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
347 if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
348 getAs<FunctionProtoType>())
349 S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
350 << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
351 << FD->getExceptionSpecSourceRange();
352 } else
353 S.Diag(FD->getLocation(), diag::note_throw_in_function)
354 << FD->getExceptionSpecSourceRange();
355 }
356 }
357
checkThrowInNonThrowingFunc(Sema & S,const FunctionDecl * FD,AnalysisDeclContext & AC)358 static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
359 AnalysisDeclContext &AC) {
360 CFG *BodyCFG = AC.getCFG();
361 if (!BodyCFG)
362 return;
363 if (BodyCFG->getExit().pred_empty())
364 return;
365 visitReachableThrows(BodyCFG, [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
366 if (throwEscapes(S, Throw, Block, BodyCFG))
367 EmitDiagForCXXThrowInNonThrowingFunc(S, Throw->getThrowLoc(), FD);
368 });
369 }
370
isNoexcept(const FunctionDecl * FD)371 static bool isNoexcept(const FunctionDecl *FD) {
372 const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
373 if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
374 return true;
375 return false;
376 }
377
378 //===----------------------------------------------------------------------===//
379 // Check for missing return value.
380 //===----------------------------------------------------------------------===//
381
/// Classification of how control can leave a statement/function body, as
/// computed by CheckFallThrough below (see its doc comment for the precise
/// contract of each value).
enum ControlFlowKind {
  UnknownFallThrough,      // No CFG available; nothing can be concluded.
  NeverFallThrough,        // Never falls off the end, but may return.
  MaybeFallThrough,        // Might or might not fall off the end.
  AlwaysFallThrough,       // Always falls off the end.
  NeverFallThroughOrReturn // Neither falls off the end nor returns.
};
389
/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return. We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return. We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (!cfg) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (const auto *B : *cfg) {
      if (!live[B->getBlockID()]) {
        // Only blocks with no predecessors are candidates here.
        if (B->pred_begin() == B->pred_end()) {
          const Stmt *Term = B->getTerminatorStmt();
          if (Term && isa<CXXTryStmt>(Term))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead. Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(B, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit
  // block and look for fall through paths, being careful to ignore normal
  // returns, and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator I =
           cfg->getExit().filtered_pred_start_end(FO);
       I.hasMore(); ++I) {
    const CFGBlock &B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG. This is
    // normal. We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    // Scan backwards for the last statement element in the block.
    for ( ; ri != re ; ++ri)
      if (ri->getAs<CFGStmt>())
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      const Stmt *Term = B.getTerminatorStmt();
      if (Term && isa<CXXTryStmt>(Term)) {
        // A try statement reaching the exit is not a plain fall-through.
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    // Classify the edge by the last statement in the predecessor block.
    CFGStmt CS = ri->castAs<CFGStmt>();
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S) || isa<CoreturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    // An edge into the exit that is not a direct successor edge (e.g. via
    // exceptional paths) counts as abnormal.
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
        == B.succ_end()) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }
  // Fold the collected edge kinds into the final classification.
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return. If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
520
namespace {

/// Bundles the diagnostic IDs to emit for each fall-through outcome. The IDs
/// differ depending on what kind of code is being checked (function, block,
/// lambda, or coroutine); an ID of 0 means "do not diagnose that case".
struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;
  // Which kind of entity the IDs above were configured for.
  enum { Function, Block, Lambda, Coroutine } funMode;
  // Location used when querying whether a diagnostic is enabled.
  SourceLocation FuncLoc;

  /// Diagnostic set for an ordinary function or method.
  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
        diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
        diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
        diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
        diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    // Don't suggest that template instantiations be marked "noreturn"
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
      isTemplateInstantiation = Function->isTemplateInstantiation();

    if (!isVirtualMethod && !isTemplateInstantiation)
      D.diag_NeverFallThroughOrReturn =
          diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = Function;
    return D;
  }

  /// Diagnostic set for a coroutine body. The "has noreturn" cases are
  /// disabled (ID 0).
  static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn = 0;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
        diag::warn_maybe_falloff_nonvoid_coroutine;
    D.diag_AlwaysFallThrough_HasNoReturn = 0;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
        diag::warn_falloff_nonvoid_coroutine;
    D.funMode = Coroutine;
    return D;
  }

  /// Diagnostic set for a block literal; fall-off cases are hard errors.
  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
        diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
        diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
        diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
        diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Block;
    return D;
  }

  /// Diagnostic set for a lambda: noreturn violations are errors, non-void
  /// fall-off is only a warning.
  static CheckFallThroughDiagnostics MakeForLambda() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
        diag::err_noreturn_lambda_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
        diag::warn_maybe_falloff_nonvoid_lambda;
    D.diag_AlwaysFallThrough_HasNoReturn =
        diag::err_noreturn_lambda_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
        diag::warn_falloff_nonvoid_lambda;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Lambda;
    return D;
  }

  /// Returns true when every diagnostic relevant to this entity is disabled
  /// or inapplicable, so the caller can skip the fall-through analysis
  /// entirely (used as a compile-speed short circuit).
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode == Function) {
      return (ReturnsVoid ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
                          FuncLoc)) &&
             (!HasNoReturn ||
              D.isIgnored(diag::warn_noreturn_function_has_return_expr,
                          FuncLoc)) &&
             (!ReturnsVoid ||
              D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc));
    }
    if (funMode == Coroutine) {
      return (ReturnsVoid ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_function, FuncLoc) ||
              D.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine,
                          FuncLoc)) &&
             (!HasNoReturn);
    }
    // For blocks / lambdas.
    return ReturnsVoid && !HasNoReturn;
  }
};

} // anonymous namespace
633
/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value. Check that we don't fall off the end
/// of a noreturn function. We assume that functions and blocks not marked
/// noreturn will return.
///
/// \param D the declaration whose body is being checked.
/// \param Body the body statement (possibly a CoroutineBodyStmt).
/// \param BlockType for BlockDecls, the block's pointer type; used to read
///        the return type and noreturn attribute off the function type.
/// \param CD the diagnostic-ID table appropriate for D's kind.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    QualType BlockType,
                                    const CheckFallThroughDiagnostics &CD,
                                    AnalysisDeclContext &AC,
                                    sema::FunctionScopeInfo *FSI) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;
  bool IsCoroutine = FSI->isCoroutine();

  // Derive ReturnsVoid/HasNoReturn from whichever kind of decl we have.
  if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
    // For a coroutine body, the presence of a fallthrough handler is what
    // makes falling off the end valid (treated like a void return here).
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Body))
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
    else
      ReturnsVoid = FD->getReturnType()->isVoidType();
    HasNoReturn = FD->isNoReturn();
  }
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getReturnType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    // Blocks carry their signature on the block pointer type.
    if (const FunctionType *FT =
          BlockType->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getReturnType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
    return;
  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
  // Coroutine diagnostics take the promise type as an extra argument.
  auto EmitDiag = [&](SourceLocation Loc, unsigned DiagID) {
    if (IsCoroutine)
      S.Diag(Loc, DiagID) << FSI->CoroutinePromise->getType();
    else
      S.Diag(Loc, DiagID);
  };

  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
  if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
    return;

  // Either in a function body compound statement, or a function-try-block.
  switch (CheckFallThrough(AC)) {
  case UnknownFallThrough:
    break;

  case MaybeFallThrough:
    if (HasNoReturn)
      EmitDiag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
    else if (!ReturnsVoid)
      EmitDiag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
    break;
  case AlwaysFallThrough:
    if (HasNoReturn)
      EmitDiag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
    else if (!ReturnsVoid)
      EmitDiag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
    break;
  case NeverFallThroughOrReturn:
    // Suggest noreturn only where the table enables it (ID != 0); the
    // selector (0/1) distinguishes function vs. ObjC method wording.
    if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
      if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
        S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
      } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
        S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
      } else {
        S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
      }
    }
    break;
  case NeverFallThrough:
    break;
  }
}
718
719 //===----------------------------------------------------------------------===//
720 // -Wuninitialized
721 //===----------------------------------------------------------------------===//
722
723 namespace {
724 /// ContainsReference - A visitor class to search for references to
725 /// a particular declaration (the needle) within any evaluated component of an
726 /// expression (recursively).
727 class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
728 bool FoundReference;
729 const DeclRefExpr *Needle;
730
731 public:
732 typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
733
ContainsReference(ASTContext & Context,const DeclRefExpr * Needle)734 ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
735 : Inherited(Context), FoundReference(false), Needle(Needle) {}
736
VisitExpr(const Expr * E)737 void VisitExpr(const Expr *E) {
738 // Stop evaluating if we already have a reference.
739 if (FoundReference)
740 return;
741
742 Inherited::VisitExpr(E);
743 }
744
VisitDeclRefExpr(const DeclRefExpr * E)745 void VisitDeclRefExpr(const DeclRefExpr *E) {
746 if (E == Needle)
747 FoundReference = true;
748 else
749 Inherited::VisitDeclRefExpr(E);
750 }
751
doesContainReference() const752 bool doesContainReference() const { return FoundReference; }
753 };
754 } // anonymous namespace
755
SuggestInitializationFixit(Sema & S,const VarDecl * VD)756 static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
757 QualType VariableTy = VD->getType().getCanonicalType();
758 if (VariableTy->isBlockPointerType() &&
759 !VD->hasAttr<BlocksAttr>()) {
760 S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
761 << VD->getDeclName()
762 << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
763 return true;
764 }
765
766 // Don't issue a fixit if there is already an initializer.
767 if (VD->getInit())
768 return false;
769
770 // Don't suggest a fixit inside macros.
771 if (VD->getEndLoc().isMacroID())
772 return false;
773
774 SourceLocation Loc = S.getLocForEndOfToken(VD->getEndLoc());
775
776 // Suggest possible initialization (if any).
777 std::string Init = S.getFixItZeroInitializerForType(VariableTy, Loc);
778 if (Init.empty())
779 return false;
780
781 S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
782 << FixItHint::CreateInsertion(Loc, Init);
783 return true;
784 }
785
786 /// Create a fixit to remove an if-like statement, on the assumption that its
787 /// condition is CondVal.
CreateIfFixit(Sema & S,const Stmt * If,const Stmt * Then,const Stmt * Else,bool CondVal,FixItHint & Fixit1,FixItHint & Fixit2)788 static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
789 const Stmt *Else, bool CondVal,
790 FixItHint &Fixit1, FixItHint &Fixit2) {
791 if (CondVal) {
792 // If condition is always true, remove all but the 'then'.
793 Fixit1 = FixItHint::CreateRemoval(
794 CharSourceRange::getCharRange(If->getBeginLoc(), Then->getBeginLoc()));
795 if (Else) {
796 SourceLocation ElseKwLoc = S.getLocForEndOfToken(Then->getEndLoc());
797 Fixit2 =
798 FixItHint::CreateRemoval(SourceRange(ElseKwLoc, Else->getEndLoc()));
799 }
800 } else {
801 // If condition is always false, remove all but the 'else'.
802 if (Else)
803 Fixit1 = FixItHint::CreateRemoval(CharSourceRange::getCharRange(
804 If->getBeginLoc(), Else->getBeginLoc()));
805 else
806 Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
807 }
808 }
809
/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
///
/// \param S               Sema instance used to emit the diagnostics.
/// \param VD              the variable used while uninitialized.
/// \param Use             the particular use, including how confident the
///                        analysis is that the value is uninitialized there
///                        (see UninitUse::getKind()).
/// \param IsCapturedByBlock true if the use happens through a block capture;
///                        selects the block-capture wording of the warnings.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  bool Diagnosed = false;

  switch (Use.getKind()) {
  case UninitUse::Always:
    // Uninitialized on every path: warn directly at the use.
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
    return;

  case UninitUse::AfterDecl:
  case UninitUse::AfterCall:
    // Uninitialized each time control reaches the declaration again (e.g. on
    // a loop back-edge) or each time the function is called. Warn at the
    // declaration and point at the use in a note. The 4/5 value selects the
    // corresponding %select alternative of warn_sometimes_uninit_var.
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
        << const_cast<DeclContext*>(VD->getLexicalDeclContext())
        << VD->getSourceRange();
    S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
    return;

  case UninitUse::Maybe:
  case UninitUse::Sometimes:
    // Carry on to report sometimes-uninitialized branches, if possible,
    // or a 'may be used uninitialized' diagnostic otherwise.
    break;
  }

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;  // %select index describing the branching construct.
    StringRef Str;      // Spelling of the construct ("if", "while", ...).
    SourceRange Range;  // Source range of the branch condition.

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
    default:
      // Don't know how to report this. Just fall back to 'may be used
      // uninitialized'. FIXME: Can this happen?
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
      // Only the short-circuiting logical operators create branches.
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(
            SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      if (I->Output)
        // Loop entered: delete the condition so the loop always runs.
        Fixit1 = FixItHint::CreateRemoval(Range);
      else
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::CXXForRangeStmtClass:
      if (I->Output == 1) {
        // The use occurs if a range-based for loop's body never executes.
        // That may be impossible, and there's no syntactic fix for this,
        // so treat it as a 'may be uninitialized' case.
        continue;
      }
      DiagKind = 1;
      Str = "for";
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
      break;
    }

    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock << DiagKind
        << Str << I->Output << Range;
    S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
          << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  // No branch could be reported individually; fall back to the generic
  // 'may be used uninitialized' warning at the use.
  if (!Diagnosed)
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
}
978
979 /// Diagnose uninitialized const reference usages.
DiagnoseUninitializedConstRefUse(Sema & S,const VarDecl * VD,const UninitUse & Use)980 static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
981 const UninitUse &Use) {
982 S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_const_reference)
983 << VD->getDeclName() << Use.getUser()->getSourceRange();
984 return true;
985 }
986
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable. This manages the different forms of diagnostic
/// emitted for particular types of uses. Returns true if the use was diagnosed
/// as a warning. If a particular use is one we omit warnings for, returns
/// false.
///
/// \param alwaysReportSelfInit if true, even the idiomatic 'int x = x;'
///        self-init is reported rather than suppressed.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const UninitUse &Use,
                                     bool alwaysReportSelfInit = false) {
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
    // Inspect the initializer of the variable declaration which is
    // being referenced prior to its initialization. We emit
    // specialized diagnostics for self-initialization, and we
    // specifically avoid warning about self references which take the
    // form of:
    //
    //   int x = x;
    //
    // This is used to indicate to GCC that 'x' is intentionally left
    // uninitialized. Proven code paths which access 'x' in
    // an uninitialized state after this will still warn.
    if (const Expr *Initializer = VD->getInit()) {
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
        return false;

      // The self-reference is buried inside the initializer (e.g.
      // 'int x = x + 1;'): point at the reference itself.
      ContainsReference CR(S.Context, DRE);
      CR.Visit(Initializer);
      if (CR.doesContainReference()) {
        S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
            << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
        return true;
      }
    }

    DiagUninitUse(S, VD, Use, false);
  } else {
    // The use is a block capture rather than a direct reference; an
    // uninitialized block-pointer variable captured by a block gets its own
    // dedicated warning.
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
      S.Diag(BE->getBeginLoc(),
             diag::warn_uninit_byref_blockvar_captured_by_block)
          << VD->getDeclName()
          << VD->getType().getQualifiers().hasObjCLifetime();
    else
      DiagUninitUse(S, VD, Use, true);
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!SuggestInitializationFixit(S, VD))
    S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
        << VD->getDeclName();

  return true;
}
1041
namespace {
/// Walks a function body and matches [[fallthrough]]-style annotations
/// against the fall-through edges between switch labels found in the CFG.
class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
public:
  FallthroughMapper(Sema &S)
    : FoundSwitchStatements(false),
      S(S) {
  }

  /// True if the traversed body contained at least one switch statement.
  bool foundSwitchStatements() const { return FoundSwitchStatements; }

  /// Mark the given fallthrough annotation as matched to a fall-through
  /// edge. Annotations still present in FallthroughStmts at the end are
  /// diagnosed as invalidly placed.
  void markFallthroughVisited(const AttributedStmt *Stmt) {
    bool Found = FallthroughStmts.erase(Stmt);
    assert(Found);
    (void)Found;
  }

  typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;

  /// The fallthrough annotations that have not been matched to a
  /// fall-through edge (yet).
  const AttrStmts &getFallthroughStmts() const {
    return FallthroughStmts;
  }

  /// Compute the set of CFG blocks reachable from the entry block,
  /// additionally seeding every switch-case label as reachable.
  void fillReachableBlocks(CFG *Cfg) {
    assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
    std::deque<const CFGBlock *> BlockQueue;

    ReachableBlocks.insert(&Cfg->getEntry());
    BlockQueue.push_back(&Cfg->getEntry());
    // Mark all case blocks reachable to avoid problems with switching on
    // constants, covered enums, etc.
    // These blocks can contain fall-through annotations, and we don't want to
    // issue a warn_fallthrough_attr_unreachable for them.
    for (const auto *B : *Cfg) {
      const Stmt *L = B->getLabel();
      if (L && isa<SwitchCase>(L) && ReachableBlocks.insert(B).second)
        BlockQueue.push_back(B);
    }

    // Breadth-first traversal over successor edges.
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      for (CFGBlock::const_succ_iterator I = P->succ_begin(),
                                         E = P->succ_end();
           I != E; ++I) {
        if (*I && ReachableBlocks.insert(*I).second)
          BlockQueue.push_back(*I);
      }
    }
  }

  /// Check whether control can fall through into the switch-case block \p B
  /// without an annotation. Returns true if at least one unannotated
  /// fall-through edge exists; \p AnnotatedCnt is set to the number of
  /// annotated fall-throughs found.
  bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
                                 bool IsTemplateInstantiation) {
    assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

    int UnannotatedCnt = 0;
    AnnotatedCnt = 0;

    // Walk backwards from B through its predecessors.
    std::deque<const CFGBlock*> BlockQueue(B.pred_begin(), B.pred_end());
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      if (!P) continue;

      const Stmt *Term = P->getTerminatorStmt();
      if (Term && isa<SwitchStmt>(Term))
        continue; // Switch statement, good.

      const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
      if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Previous case label has no statements, good.

      const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
      if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Case label is preceded with a normal label, good.

      if (!ReachableBlocks.count(P)) {
        // Unreachable predecessor: search it (backwards) for an annotation
        // and warn that the annotation itself can never be reached.
        for (CFGBlock::const_reverse_iterator ElemIt = P->rbegin(),
                                              ElemEnd = P->rend();
             ElemIt != ElemEnd; ++ElemIt) {
          if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>()) {
            if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
              // Don't issue a warning for an unreachable fallthrough
              // attribute in template instantiations as it may not be
              // unreachable in all instantiations of the template.
              if (!IsTemplateInstantiation)
                S.Diag(AS->getBeginLoc(),
                       diag::warn_fallthrough_attr_unreachable);
              markFallthroughVisited(AS);
              ++AnnotatedCnt;
              break;
            }
            // Don't care about other unreachable statements.
          }
        }
        // If there are no unreachable statements, this may be a special
        // case in CFG:
        // case X: {
        //    A a;  // A has a destructor.
        //    break;
        // }
        // // <<<< This place is represented by a 'hanging' CFG block.
        // case Y:
        continue;
      }

      const Stmt *LastStmt = getLastStmt(*P);
      if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
        markFallthroughVisited(AS);
        ++AnnotatedCnt;
        continue; // Fallthrough annotation, good.
      }

      if (!LastStmt) { // This block contains no executable statements.
        // Traverse its predecessors.
        std::copy(P->pred_begin(), P->pred_end(),
                  std::back_inserter(BlockQueue));
        continue;
      }

      ++UnannotatedCnt;
    }
    return !!UnannotatedCnt;
  }

  // RecursiveASTVisitor setup.
  bool shouldWalkTypesOfTypeLocs() const { return false; }

  /// Collect every fallthrough annotation seen in the body.
  bool VisitAttributedStmt(AttributedStmt *S) {
    if (asFallThroughAttr(S))
      FallthroughStmts.insert(S);
    return true;
  }

  bool VisitSwitchStmt(SwitchStmt *S) {
    FoundSwitchStatements = true;
    return true;
  }

  // We don't want to traverse local type declarations. We analyze their
  // methods separately.
  bool TraverseDecl(Decl *D) { return true; }

  // We analyze lambda bodies separately. Skip them here.
  bool TraverseLambdaExpr(LambdaExpr *LE) {
    // Traverse the captures, but not the body.
    for (const auto C : zip(LE->captures(), LE->capture_inits()))
      TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
    return true;
  }

private:

  /// Return \p S as an AttributedStmt carrying a FallThroughAttr, or null.
  static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
    if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
      if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
        return AS;
    }
    return nullptr;
  }

  /// Return the last statement of \p B: its terminator if it has one,
  /// otherwise its last CFGStmt element, otherwise (workaround, see below)
  /// the sub-statement of its switch-case label.
  static const Stmt *getLastStmt(const CFGBlock &B) {
    if (const Stmt *Term = B.getTerminatorStmt())
      return Term;
    for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
                                          ElemEnd = B.rend();
         ElemIt != ElemEnd; ++ElemIt) {
      if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>())
        return CS->getStmt();
    }
    // Workaround to detect a statement thrown out by CFGBuilder:
    //   case X: {} case Y:
    //   case X: ; case Y:
    if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
      if (!isa<SwitchCase>(SW->getSubStmt()))
        return SW->getSubStmt();

    return nullptr;
  }

  bool FoundSwitchStatements;  // Any switch seen during traversal.
  AttrStmts FallthroughStmts;  // Annotations not yet matched to an edge.
  Sema &S;
  llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
};
} // anonymous namespace
1227
getFallthroughAttrSpelling(Preprocessor & PP,SourceLocation Loc)1228 static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
1229 SourceLocation Loc) {
1230 TokenValue FallthroughTokens[] = {
1231 tok::l_square, tok::l_square,
1232 PP.getIdentifierInfo("fallthrough"),
1233 tok::r_square, tok::r_square
1234 };
1235
1236 TokenValue ClangFallthroughTokens[] = {
1237 tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
1238 tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
1239 tok::r_square, tok::r_square
1240 };
1241
1242 bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C2x;
1243
1244 StringRef MacroName;
1245 if (PreferClangAttr)
1246 MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1247 if (MacroName.empty())
1248 MacroName = PP.getLastMacroWithSpelling(Loc, FallthroughTokens);
1249 if (MacroName.empty() && !PreferClangAttr)
1250 MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
1251 if (MacroName.empty()) {
1252 if (!PreferClangAttr)
1253 MacroName = "[[fallthrough]]";
1254 else if (PP.getLangOpts().CPlusPlus)
1255 MacroName = "[[clang::fallthrough]]";
1256 else
1257 MacroName = "__attribute__((fallthrough))";
1258 }
1259 return MacroName;
1260 }
1261
/// Diagnose unannotated fall-through between switch labels in the body held
/// by \p AC, and misplaced fallthrough annotations.
///
/// \param PerFunction if true, emit the per-function diagnostic variant,
///        which only applies to functions that already contain fallthrough
///        annotations (see the early return below).
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  if (!FM.foundSwitchStatements())
    return;

  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (const CFGBlock *B : llvm::reverse(*Cfg)) {
    const Stmt *Label = B->getLabel();

    // Only switch-case labels are interesting fall-through targets.
    if (!Label || !isa<SwitchCase>(Label))
      continue;

    int AnnotatedCnt;

    bool IsTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(AC.getDecl()))
      IsTemplateInstantiation = Function->isTemplateInstantiation();
    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt,
                                      IsTemplateInstantiation))
      continue;

    S.Diag(Label->getBeginLoc(),
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
                       : diag::warn_unannotated_fallthrough);

    if (!AnnotatedCnt) {
      SourceLocation L = Label->getBeginLoc();
      // Don't suggest fix-its inside macro expansions.
      if (L.isMacroID())
        continue;

      const Stmt *Term = B->getTerminatorStmt();
      // Skip empty cases.
      while (B->empty() && !Term && B->succ_size() == 1) {
        B = *B->succ_begin();
        Term = B->getTerminatorStmt();
      }
      if (!(B->empty() && Term && isa<BreakStmt>(Term))) {
        // Suggest inserting a fallthrough annotation before the label.
        Preprocessor &PP = S.getPreprocessor();
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, L);
        SmallString<64> TextToInsert(AnnotationSpelling);
        TextToInsert += "; ";
        S.Diag(L, diag::note_insert_fallthrough_fixit)
            << AnnotationSpelling
            << FixItHint::CreateInsertion(L, TextToInsert);
      }
      // Alternatively, suggest inserting a break.
      S.Diag(L, diag::note_insert_break_fixit)
          << FixItHint::CreateInsertion(L, "break; ");
    }
  }

  // Any annotation that was never matched to a fall-through edge is
  // invalidly placed.
  for (const auto *F : FM.getFallthroughStmts())
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
}
1327
isInLoop(const ASTContext & Ctx,const ParentMap & PM,const Stmt * S)1328 static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
1329 const Stmt *S) {
1330 assert(S);
1331
1332 do {
1333 switch (S->getStmtClass()) {
1334 case Stmt::ForStmtClass:
1335 case Stmt::WhileStmtClass:
1336 case Stmt::CXXForRangeStmtClass:
1337 case Stmt::ObjCForCollectionStmtClass:
1338 return true;
1339 case Stmt::DoStmtClass: {
1340 Expr::EvalResult Result;
1341 if (!cast<DoStmt>(S)->getCond()->EvaluateAsInt(Result, Ctx))
1342 return true;
1343 return Result.Val.getInt().getBoolValue();
1344 }
1345 default:
1346 break;
1347 }
1348 } while ((S = PM.getParent(S)));
1349
1350 return false;
1351 }
1352
/// Warn about multiple reads of a weak object within \p CurFn's body; see
/// warn_arc_repeated_use_of_weak / warn_arc_possible_repeated_use_of_weak.
///
/// \param CurFn scope info carrying the recorded weak-object uses.
/// \param D the declaration whose body is analyzed (used to classify the
///        body as function/method/block/lambda for the warning text).
/// \param PM parent map used for the in-loop heuristic.
static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
      StmtUsesPair;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      WeakUseVector::const_iterator UI2 = UI;
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        // This was the only read; apply the single-read heuristics above.
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
            continue;
      }
    }

    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  SourceManager &SM = S.getSourceManager();
  llvm::sort(UsesByStmt,
             [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
               return SM.isBeforeInTranslationUnit(LHS.first->getBeginLoc(),
                                                   RHS.first->getBeginLoc());
             });

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (const auto &P : UsesByStmt) {
    const Stmt *FirstRead = P.first;
    const WeakObjectProfileTy &Key = P.second->first;
    const WeakUseVector &Uses = P.second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    const NamedDecl *KeyProp = Key.getProperty();
    if (isa<VarDecl>(KeyProp))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(KeyProp))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(KeyProp))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(KeyProp))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Do not warn about IBOutlet weak property receivers being set to null
    // since they are typically only used from the main thread.
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(KeyProp))
      if (Prop->hasAttr<IBOutletAttr>())
        continue;

    // Show the first time the object was read.
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
        << int(ObjectKind) << KeyProp << int(FunctionKind)
        << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
    for (const auto &Use : Uses) {
      if (Use.getUseExpr() == FirstRead)
        continue;
      S.Diag(Use.getUseExpr()->getBeginLoc(),
             diag::note_arc_weak_also_accessed_here)
          << Use.getUseExpr()->getSourceRange();
    }
  }
}
1508
namespace clang {
namespace {
// A delayed diagnostic: the primary warning paired with any notes that
// should be emitted right after it.
typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
typedef std::list<DelayedDiag> DiagList;

/// Comparator that orders delayed diagnostics by their location in the
/// translation unit, for deterministic output.
struct SortDiagBySourceLocation {
  SourceManager &SM;
  SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}

  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
    // Although this call will be slow, this is only called when outputting
    // multiple warnings.
    return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
  }
};
} // anonymous namespace
} // namespace clang
1527
namespace {
/// Collects the uses reported by the uninitialized-values analysis and
/// emits them as diagnostics when flushed (which also happens on
/// destruction).
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef SmallVector<UninitUse, 2> UsesVec;
  // The pointer owns the vector of uses; the bool records whether the
  // variable has an idiomatic self-init ('int x = x;').
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
  // Prefer using MapVector to DenseMap, so that iteration order will be
  // the same as insertion order. This is needed to obtain a deterministic
  // order of diagnostics when calling flushDiagnostics().
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;
  UsesMap uses;
  UsesMap constRefUses;

public:
  UninitValsDiagReporter(Sema &S) : S(S) {}
  ~UninitValsDiagReporter() override { flushDiagnostics(); }

  /// Get (creating on first access) the use list for \p vd in map \p um.
  MappedType &getUses(UsesMap &um, const VarDecl *vd) {
    MappedType &V = um[vd];
    if (!V.getPointer())
      V.setPointer(new UsesVec());
    return V;
  }

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    getUses(uses, vd).getPointer()->push_back(use);
  }

  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
                                         const UninitUse &use) override {
    getUses(constRefUses, vd).getPointer()->push_back(use);
  }

  /// Record that \p vd has a self-init; this redirects or downgrades the
  /// diagnostics emitted for its uses in flushDiagnostics().
  void handleSelfInit(const VarDecl *vd) override {
    getUses(uses, vd).setInt(true);
    getUses(constRefUses, vd).setInt(true);
  }

  /// Emit all collected diagnostics and reset both maps.
  void flushDiagnostics() {
    for (const auto &P : uses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      // Specially handle the case where we have uses of an uninitialized
      // variable, but the root cause is an idiomatic self-init. We want
      // to report the diagnostic at the self-init since that is the root cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        // Sort the uses by their SourceLocations. While not strictly
        // guaranteed to produce them in line/column order, this will provide
        // a stable ordering.
        llvm::sort(vec->begin(), vec->end(),
                   [](const UninitUse &a, const UninitUse &b) {
          // Prefer a more confident report over a less confident one.
          if (a.getKind() != b.getKind())
            return a.getKind() > b.getKind();
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
        });

        for (const auto &U : *vec) {
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
          UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;

          if (DiagnoseUninitializedUse(S, vd, Use))
            // Skip further diagnostics for this variable. We try to warn only
            // on the first point at which a variable is used uninitialized.
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    uses.clear();

    // Flush all const reference uses diags.
    for (const auto &P : constRefUses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        for (const auto &U : *vec) {
          if (DiagnoseUninitializedConstRefUse(S, vd, U))
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    constRefUses.clear();
  }

private:
  /// True if any use in \p vec is of a kind that is definitely (not merely
  /// possibly) uninitialized.
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
    return std::any_of(vec->begin(), vec->end(), [](const UninitUse &U) {
      return U.getKind() == UninitUse::Always ||
             U.getKind() == UninitUse::AfterCall ||
             U.getKind() == UninitUse::AfterDecl;
    });
  }
};
1647
1648 /// Inter-procedural data for the called-once checker.
1649 class CalledOnceInterProceduralData {
1650 public:
1651 // Add the delayed warning for the given block.
addDelayedWarning(const BlockDecl * Block,PartialDiagnosticAt && Warning)1652 void addDelayedWarning(const BlockDecl *Block,
1653 PartialDiagnosticAt &&Warning) {
1654 DelayedBlockWarnings[Block].emplace_back(std::move(Warning));
1655 }
1656 // Report all of the warnings we've gathered for the given block.
flushWarnings(const BlockDecl * Block,Sema & S)1657 void flushWarnings(const BlockDecl *Block, Sema &S) {
1658 for (const PartialDiagnosticAt &Delayed : DelayedBlockWarnings[Block])
1659 S.Diag(Delayed.first, Delayed.second);
1660
1661 discardWarnings(Block);
1662 }
1663 // Discard all of the warnings we've gathered for the given block.
discardWarnings(const BlockDecl * Block)1664 void discardWarnings(const BlockDecl *Block) {
1665 DelayedBlockWarnings.erase(Block);
1666 }
1667
1668 private:
1669 using DelayedDiagnostics = SmallVector<PartialDiagnosticAt, 2>;
1670 llvm::DenseMap<const BlockDecl *, DelayedDiagnostics> DelayedBlockWarnings;
1671 };
1672
1673 class CalledOnceCheckReporter : public CalledOnceCheckHandler {
1674 public:
CalledOnceCheckReporter(Sema & S,CalledOnceInterProceduralData & Data)1675 CalledOnceCheckReporter(Sema &S, CalledOnceInterProceduralData &Data)
1676 : S(S), Data(Data) {}
handleDoubleCall(const ParmVarDecl * Parameter,const Expr * Call,const Expr * PrevCall,bool IsCompletionHandler,bool Poised)1677 void handleDoubleCall(const ParmVarDecl *Parameter, const Expr *Call,
1678 const Expr *PrevCall, bool IsCompletionHandler,
1679 bool Poised) override {
1680 auto DiagToReport = IsCompletionHandler
1681 ? diag::warn_completion_handler_called_twice
1682 : diag::warn_called_once_gets_called_twice;
1683 S.Diag(Call->getBeginLoc(), DiagToReport) << Parameter;
1684 S.Diag(PrevCall->getBeginLoc(), diag::note_called_once_gets_called_twice)
1685 << Poised;
1686 }
1687
handleNeverCalled(const ParmVarDecl * Parameter,bool IsCompletionHandler)1688 void handleNeverCalled(const ParmVarDecl *Parameter,
1689 bool IsCompletionHandler) override {
1690 auto DiagToReport = IsCompletionHandler
1691 ? diag::warn_completion_handler_never_called
1692 : diag::warn_called_once_never_called;
1693 S.Diag(Parameter->getBeginLoc(), DiagToReport)
1694 << Parameter << /* Captured */ false;
1695 }
1696
handleNeverCalled(const ParmVarDecl * Parameter,const Decl * Function,const Stmt * Where,NeverCalledReason Reason,bool IsCalledDirectly,bool IsCompletionHandler)1697 void handleNeverCalled(const ParmVarDecl *Parameter, const Decl *Function,
1698 const Stmt *Where, NeverCalledReason Reason,
1699 bool IsCalledDirectly,
1700 bool IsCompletionHandler) override {
1701 auto DiagToReport = IsCompletionHandler
1702 ? diag::warn_completion_handler_never_called_when
1703 : diag::warn_called_once_never_called_when;
1704 PartialDiagnosticAt Warning(Where->getBeginLoc(), S.PDiag(DiagToReport)
1705 << Parameter
1706 << IsCalledDirectly
1707 << (unsigned)Reason);
1708
1709 if (const auto *Block = dyn_cast<BlockDecl>(Function)) {
1710 // We shouldn't report these warnings on blocks immediately
1711 Data.addDelayedWarning(Block, std::move(Warning));
1712 } else {
1713 S.Diag(Warning.first, Warning.second);
1714 }
1715 }
1716
handleCapturedNeverCalled(const ParmVarDecl * Parameter,const Decl * Where,bool IsCompletionHandler)1717 void handleCapturedNeverCalled(const ParmVarDecl *Parameter,
1718 const Decl *Where,
1719 bool IsCompletionHandler) override {
1720 auto DiagToReport = IsCompletionHandler
1721 ? diag::warn_completion_handler_never_called
1722 : diag::warn_called_once_never_called;
1723 S.Diag(Where->getBeginLoc(), DiagToReport)
1724 << Parameter << /* Captured */ true;
1725 }
1726
1727 void
handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl * Block)1728 handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl *Block) override {
1729 Data.flushWarnings(Block, S);
1730 }
1731
  // No guarantee the block is ever called: drop its buffered warnings rather
  // than risk emitting false positives.
  void handleBlockWithNoGuarantees(const BlockDecl *Block) override {
    Data.discardWarnings(Block);
  }
1735
private:
  Sema &S;                             // Diagnostic sink.
  CalledOnceInterProceduralData &Data; // Buffer for block-delayed warnings.
};
1740
// Diagnostics produced by the 'called once' parameter analysis proper.
constexpr unsigned CalledOnceWarnings[] = {
    diag::warn_called_once_never_called,
    diag::warn_called_once_never_called_when,
    diag::warn_called_once_gets_called_twice};

// Diagnostics produced when the analysis is driven by completion-handler
// naming conventions rather than an explicit annotation.
constexpr unsigned CompletionHandlerWarnings[]{
    diag::warn_completion_handler_never_called,
    diag::warn_completion_handler_never_called_when,
    diag::warn_completion_handler_called_twice};
1750
shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,const DiagnosticsEngine & Diags,SourceLocation At)1751 bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,
1752 const DiagnosticsEngine &Diags,
1753 SourceLocation At) {
1754 return llvm::any_of(DiagIDs, [&Diags, At](unsigned DiagID) {
1755 return !Diags.isIgnored(DiagID, At);
1756 });
1757 }
1758
// True if any of the convention-based (completion handler) diagnostics are
// enabled at the given location.
bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine &Diags,
                                        SourceLocation At) {
  return shouldAnalyzeCalledOnceImpl(CompletionHandlerWarnings, Diags, At);
}
1763
shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine & Diags,SourceLocation At)1764 bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine &Diags,
1765 SourceLocation At) {
1766 return shouldAnalyzeCalledOnceImpl(CalledOnceWarnings, Diags, At) ||
1767 shouldAnalyzeCalledOnceConventions(Diags, At);
1768 }
1769 } // anonymous namespace
1770
1771 //===----------------------------------------------------------------------===//
1772 // -Wthread-safety
1773 //===----------------------------------------------------------------------===//
1774 namespace clang {
1775 namespace threadSafety {
1776 namespace {
/// Collects -Wthread-safety diagnostics during the analysis and emits them
/// in deterministic (source-location) order afterwards. Each warning carries
/// an optional list of notes; in verbose mode a "warning occurred in function"
/// note is appended to every diagnostic.
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings; // Buffered (warning, notes) pairs; flushed sorted.
  SourceLocation FunLocation, FunEndLocation;

  // Set by enterFunction/leaveFunction; used for verbose-mode notes.
  const FunctionDecl *CurrentFunction;
  bool Verbose;

  // Notes with no primary content: empty unless verbose mode adds the
  // enclosing-function note.
  OptionalNotes getNotes() const {
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      return OptionalNotes(1, FNote);
    }
    return OptionalNotes();
  }

  // One caller-provided note, plus the verbose enclosing-function note.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
    OptionalNotes ONS(1, Note);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  // Two caller-provided notes, plus the verbose enclosing-function note.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
                         const PartialDiagnosticAt &Note2) const {
    OptionalNotes ONS;
    ONS.push_back(Note1);
    ONS.push_back(Note2);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  // "Mutex acquired here" note, or plain notes if the lock location is
  // unknown (invalid SourceLocation).
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
    return LocLocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
               : getNotes();
  }

  // "Mutex released here" note, or plain notes if the unlock location is
  // unknown.
  OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
                                     StringRef Kind) {
    return LocUnlocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocUnlocked, S.PDiag(diag::note_unlocked_here) << Kind))
               : getNotes();
  }

public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
      : S(S), FunLocation(FL), FunEndLocation(FEL),
        CurrentFunction(nullptr), Verbose(false) {}

  void setVerbose(bool b) { Verbose = b; }

  /// Emit all buffered diagnostics in order of sourcelocation.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Diag.first.first, Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Note.first, Note.second);
    }
  }

  // The capability expression could not be resolved to a declaration.
  void handleInvalidLockExp(StringRef Kind, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
                                         << Loc);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Unlock without a matching lock; fall back to the function location when
  // the unlock site is unknown.
  void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
                             SourceLocation LocPreviousUnlock) override {
    if (Loc.isInvalid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
                                         << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeUnlockedHereNote(LocPreviousUnlock, Kind));
  }

  // E.g. a shared unlock of an exclusively-held capability.
  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
                                 LockKind Expected, LockKind Received,
                                 SourceLocation LocLocked,
                                 SourceLocation LocUnlock) override {
    if (LocUnlock.isInvalid())
      LocUnlock = FunLocation;
    PartialDiagnosticAt Warning(
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
                       << Kind << LockName << Received << Expected);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  // Acquiring a capability that is already held.
  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
                        SourceLocation LocDoubleLock) override {
    if (LocDoubleLock.isInvalid())
      LocDoubleLock = FunLocation;
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
                                                   << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  // A capability's held/not-held state is wrong at a scope boundary; the
  // LockErrorKind selects which of the four messages to emit.
  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
                                 SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK) override {
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_some_predecessors;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
      case LEK_NotLockedAtEndOfFunction:
        DiagID = diag::warn_expecting_locked;
        break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
                                                               << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  // The same capability is held both exclusively and shared at a join point.
  void handleExclusiveAndShared(StringRef Kind, Name LockName,
                                SourceLocation Loc1,
                                SourceLocation Loc2) override {
    PartialDiagnosticAt Warning(Loc1,
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
                                    << Kind << LockName);
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
                                       << Kind << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
  }

  // A guarded variable accessed with no capability held at all.
  void handleNoMutexHeld(StringRef Kind, const NamedDecl *D,
                         ProtectedOperationKind POK, AccessKind AK,
                         SourceLocation Loc) override {
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
           "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess?
                        diag::warn_variable_requires_any_lock:
                        diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
        << D << getLockKindFromAccessKind(AK));
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // A guarded operation performed without the required capability. If the
  // analysis found a near-miss (PossibleMatch), emit the "precise" variant
  // of the warning plus a note pointing at the close match.
  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
                          ProtectedOperationKind POK, Name LockName,
                          LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) override {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock_precise;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock_precise;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock_precise;
          break;
        case POK_PassByRef:
          DiagID = diag::warn_guarded_pass_by_reference;
          break;
        case POK_PtPassByRef:
          DiagID = diag::warn_pt_guarded_pass_by_reference;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
                                        << *PossibleMatch);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt VNote(D->getLocation(),
                                  S.PDiag(diag::note_guarded_by_declared_here)
                                      << D->getDeclName());
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
    } else {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock;
          break;
        case POK_PassByRef:
          DiagID = diag::warn_guarded_pass_by_reference;
          break;
        case POK_PtPassByRef:
          DiagID = diag::warn_pt_guarded_pass_by_reference;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt Note(D->getLocation(),
                                 S.PDiag(diag::note_guarded_by_declared_here));
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes());
    }
  }

  // Acquiring a capability while a required negative capability is not held.
  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_acquire_requires_negative_cap)
                                    << Kind << LockName << Neg);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Calling a function that requires a negative capability not held here.
  void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_fun_requires_negative_cap) << D << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Calling a function annotated to exclude a capability that is held.
  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
                                         << Kind << FunName << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Lock-ordering violation against an acquired_before/after annotation.
  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
                                SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
        S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Cycle detected in the acquired_before/after ordering graph.
  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
        S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void enterFunction(const FunctionDecl* FD) override {
    CurrentFunction = FD;
  }

  void leaveFunction(const FunctionDecl* FD) override {
    CurrentFunction = nullptr;
  }
};
2054 } // anonymous namespace
2055 } // namespace threadSafety
2056 } // namespace clang
2057
2058 //===----------------------------------------------------------------------===//
2059 // -Wconsumed
2060 //===----------------------------------------------------------------------===//
2061
2062 namespace clang {
2063 namespace consumed {
2064 namespace {
2065 class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
2066
2067 Sema &S;
2068 DiagList Warnings;
2069
2070 public:
2071
ConsumedWarningsHandler(Sema & S)2072 ConsumedWarningsHandler(Sema &S) : S(S) {}
2073
emitDiagnostics()2074 void emitDiagnostics() override {
2075 Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
2076 for (const auto &Diag : Warnings) {
2077 S.Diag(Diag.first.first, Diag.first.second);
2078 for (const auto &Note : Diag.second)
2079 S.Diag(Note.first, Note.second);
2080 }
2081 }
2082
warnLoopStateMismatch(SourceLocation Loc,StringRef VariableName)2083 void warnLoopStateMismatch(SourceLocation Loc,
2084 StringRef VariableName) override {
2085 PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
2086 VariableName);
2087
2088 Warnings.emplace_back(std::move(Warning), OptionalNotes());
2089 }
2090
warnParamReturnTypestateMismatch(SourceLocation Loc,StringRef VariableName,StringRef ExpectedState,StringRef ObservedState)2091 void warnParamReturnTypestateMismatch(SourceLocation Loc,
2092 StringRef VariableName,
2093 StringRef ExpectedState,
2094 StringRef ObservedState) override {
2095
2096 PartialDiagnosticAt Warning(Loc, S.PDiag(
2097 diag::warn_param_return_typestate_mismatch) << VariableName <<
2098 ExpectedState << ObservedState);
2099
2100 Warnings.emplace_back(std::move(Warning), OptionalNotes());
2101 }
2102
warnParamTypestateMismatch(SourceLocation Loc,StringRef ExpectedState,StringRef ObservedState)2103 void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2104 StringRef ObservedState) override {
2105
2106 PartialDiagnosticAt Warning(Loc, S.PDiag(
2107 diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);
2108
2109 Warnings.emplace_back(std::move(Warning), OptionalNotes());
2110 }
2111
warnReturnTypestateForUnconsumableType(SourceLocation Loc,StringRef TypeName)2112 void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
2113 StringRef TypeName) override {
2114 PartialDiagnosticAt Warning(Loc, S.PDiag(
2115 diag::warn_return_typestate_for_unconsumable_type) << TypeName);
2116
2117 Warnings.emplace_back(std::move(Warning), OptionalNotes());
2118 }
2119
warnReturnTypestateMismatch(SourceLocation Loc,StringRef ExpectedState,StringRef ObservedState)2120 void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
2121 StringRef ObservedState) override {
2122
2123 PartialDiagnosticAt Warning(Loc, S.PDiag(
2124 diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);
2125
2126 Warnings.emplace_back(std::move(Warning), OptionalNotes());
2127 }
2128
warnUseOfTempInInvalidState(StringRef MethodName,StringRef State,SourceLocation Loc)2129 void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
2130 SourceLocation Loc) override {
2131
2132 PartialDiagnosticAt Warning(Loc, S.PDiag(
2133 diag::warn_use_of_temp_in_invalid_state) << MethodName << State);
2134
2135 Warnings.emplace_back(std::move(Warning), OptionalNotes());
2136 }
2137
warnUseInInvalidState(StringRef MethodName,StringRef VariableName,StringRef State,SourceLocation Loc)2138 void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
2139 StringRef State, SourceLocation Loc) override {
2140
2141 PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
2142 MethodName << VariableName << State);
2143
2144 Warnings.emplace_back(std::move(Warning), OptionalNotes());
2145 }
2146 };
2147 } // anonymous namespace
2148 } // namespace consumed
2149 } // namespace clang
2150
2151 //===----------------------------------------------------------------------===//
2152 // AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
2153 // warnings on a function, method, or block.
2154 //===----------------------------------------------------------------------===//
2155
Policy()2156 sema::AnalysisBasedWarnings::Policy::Policy() {
2157 enableCheckFallThrough = 1;
2158 enableCheckUnreachable = 0;
2159 enableThreadSafetyAnalysis = 0;
2160 enableConsumedAnalysis = 0;
2161 }
2162
/// InterProceduralData aims to be a storage of whatever data should be passed
/// between analyses of different functions.
///
/// At the moment, its primary goal is to make the information gathered during
/// the analysis of the blocks available during the analysis of the enclosing
/// function. This is important due to the fact that blocks are analyzed before
/// the enclosed function is even parsed fully, so it is not viable to access
/// anything in the outer scope while analyzing the block. On the other hand,
/// re-building CFG for blocks and re-analyzing them when we do have all the
/// information (i.e. during the analysis of the enclosing function) seems to be
/// ill-designed.
class sema::AnalysisBasedWarnings::InterProceduralData {
public:
  // It is important to analyze blocks within functions because it's a very
  // common pattern to capture completion handler parameters by blocks.
  // Warnings delayed for blocks live here until the enclosing function's
  // analysis flushes or discards them.
  CalledOnceInterProceduralData CalledOnceData;
};
2180
isEnabled(DiagnosticsEngine & D,unsigned diag)2181 static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
2182 return (unsigned)!D.isIgnored(diag, SourceLocation());
2183 }
2184
// Zero all statistics counters and decide, once per Sema, which optional
// analyses the default policy enables, based on whether any diagnostic from
// each analysis group is turned on.
sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
    : S(s), IPData(std::make_unique<InterProceduralData>()),
      NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
      MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
      NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
      NumUninitAnalysisBlockVisits(0),
      MaxUninitAnalysisBlockVisitsPerFunction(0) {

  using namespace diag;
  DiagnosticsEngine &D = S.getDiagnostics();

  // Unreachable-code analysis runs if any of its diagnostics is enabled.
  DefaultPolicy.enableCheckUnreachable =
      isEnabled(D, warn_unreachable) || isEnabled(D, warn_unreachable_break) ||
      isEnabled(D, warn_unreachable_return) ||
      isEnabled(D, warn_unreachable_loop_increment);

  // warn_double_lock serves as the representative diagnostic for the whole
  // thread-safety group here.
  DefaultPolicy.enableThreadSafetyAnalysis = isEnabled(D, warn_double_lock);

  // Likewise warn_use_in_invalid_state for the consumed analysis.
  DefaultPolicy.enableConsumedAnalysis =
      isEnabled(D, warn_use_in_invalid_state);
}
2206
// We need this here for unique_ptr with forward declared class.
// (InterProceduralData is only fully defined in this translation unit.)
sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;
2209
flushDiagnostics(Sema & S,const sema::FunctionScopeInfo * fscope)2210 static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
2211 for (const auto &D : fscope->PossiblyUnreachableDiags)
2212 S.Diag(D.Loc, D.PD);
2213 }
2214
// Top-level driver: runs every enabled analysis-based warning over one
// function, method, or block body, sharing a single AnalysisDeclContext
// (and thus a single CFG) between all of them.
void clang::sema::AnalysisBasedWarnings::IssueWarnings(
    sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
    const Decl *D, QualType BlockType) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis if we are going to just ignore them.
  if (Diags.getIgnoreAllWarnings() ||
      (Diags.getSuppressSystemWarnings() &&
       S.SourceMgr.isInSystemHeader(D->getLocation())))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (S.hasUncompilableErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context with the specified CFG build options.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;
  AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;

  // Force that certain expressions appear as CFGElements in the CFG. This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring. This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately. This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
      P.enableConsumedAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  }
  else {
    AC.getCFGBuildOptions()
        .setAlwaysAdd(Stmt::BinaryOperatorClass)
        .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
        .setAlwaysAdd(Stmt::BlockExprClass)
        .setAlwaysAdd(Stmt::CStyleCastExprClass)
        .setAlwaysAdd(Stmt::DeclRefExprClass)
        .setAlwaysAdd(Stmt::ImplicitCastExprClass)
        .setAlwaysAdd(Stmt::UnaryOperatorClass)
        .setAlwaysAdd(Stmt::AttributedStmtClass);
  }

  // Install the logical handler.
  llvm::Optional<LogicalErrorHandler> LEH;
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    LEH.emplace(S);
    AC.getCFGBuildOptions().Observer = &*LEH;
  }

  // Emit delayed diagnostics: each one is emitted only if every statement it
  // depends on is reachable from the CFG entry (or cannot be mapped to a
  // block at all).
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (const auto &D : fscope->PossiblyUnreachableDiags) {
      for (const Stmt *S : D.Stmts)
        AC.registerForcedBlockExpression(S);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (const auto &D : fscope->PossiblyUnreachableDiags) {
        bool AllReachable = true;
        for (const Stmt *S : D.Stmts) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
              AllReachable = false;
              break;
            }
          }
          // If we cannot map to a basic block, assume the statement is
          // reachable.
        }

        if (AllReachable)
          S.Diag(D.Loc, D.PD);
      }
    }

    // If the CFG could not be built, fall back to emitting everything.
    if (!analyzed)
      flushDiagnostics(S, fscope);
  }

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    // Pick diagnostics appropriate to the kind of body: block, lambda
    // call operator, coroutine, or plain function.
    const CheckFallThroughDiagnostics &CD =
        (isa<BlockDecl>(D)
             ? CheckFallThroughDiagnostics::MakeForBlock()
             : (isa<CXXMethodDecl>(D) &&
                cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
                cast<CXXMethodDecl>(D)->getParent()->isLambda())
                   ? CheckFallThroughDiagnostics::MakeForLambda()
                   : (fscope->isCoroutine()
                          ? CheckFallThroughDiagnostics::MakeForCoroutine(D)
                          : CheckFallThroughDiagnostics::MakeForFunction(D)));
    CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getEndLoc();
    threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
    if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
      Reporter.setIssueBetaWarnings(true);
    if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
      Reporter.setVerbose(true);

    threadSafety::runThreadSafetyAnalysis(AC, Reporter,
                                          &S.ThreadSafetyDeclCache);
    Reporter.emitDiagnostics();
  }

  // Check for violations of consumed properties.
  if (P.enableConsumedAnalysis) {
    consumed::ConsumedWarningsHandler WarningHandler(S);
    consumed::ConsumedAnalyzer Analyzer(WarningHandler);
    Analyzer.run(AC);
  }

  // Uninitialized-variable analysis: run only if at least one of its
  // diagnostics is enabled, and only when a CFG could be built.
  if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc()) ||
      !Diags.isIgnored(diag::warn_uninit_const_reference, D->getBeginLoc())) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  // Check for violations of "called once" parameter properties.
  // NOTE: gated to ObjC-without-C++ here; the analysis targets ObjC
  // completion-handler conventions.
  if (S.getLangOpts().ObjC && !S.getLangOpts().CPlusPlus &&
      shouldAnalyzeCalledOnceParameters(Diags, D->getBeginLoc())) {
    if (AC.getCFG()) {
      CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
      checkCalledOnceParameters(
          AC, Reporter,
          shouldAnalyzeCalledOnceConventions(Diags, D->getBeginLoc()));
    }
  }

  // Switch fall-through annotation checking: also run when the body contains
  // a fallthrough statement, so misplaced annotations are diagnosed even if
  // the warnings themselves are off.
  bool FallThroughDiagFull =
      !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
  bool FallThroughDiagPerFunction = !Diags.isIgnored(
      diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
  if (FallThroughDiagFull || FallThroughDiagPerFunction ||
      fscope->HasFallthroughStmt) {
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  }

  if (S.getLangOpts().ObjCWeak &&
      !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());


  // Check for infinite self-recursion in functions
  if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
                       D->getBeginLoc())) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      checkRecursiveFunction(S, FD, Body, AC);
    }
  }

  // Check for throw out of non-throwing function.
  if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
      if (S.getLangOpts().CPlusPlus && isNoexcept(FD))
        checkThrowInNonThrowingFunc(S, FD, AC);

  // If none of the previous checks caused a CFG build, trigger one here
  // for the logical error handler.
  if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
    AC.getCFG();
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}
2464
PrintStats() const2465 void clang::sema::AnalysisBasedWarnings::PrintStats() const {
2466 llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
2467
2468 unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
2469 unsigned AvgCFGBlocksPerFunction =
2470 !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
2471 llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
2472 << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
2473 << " " << NumCFGBlocks << " CFG blocks built.\n"
2474 << " " << AvgCFGBlocksPerFunction
2475 << " average CFG blocks per function.\n"
2476 << " " << MaxCFGBlocksPerFunction
2477 << " max CFG blocks per function.\n";
2478
2479 unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
2480 : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
2481 unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
2482 : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
2483 llvm::errs() << NumUninitAnalysisFunctions
2484 << " functions analyzed for uninitialiazed variables\n"
2485 << " " << NumUninitAnalysisVariables << " variables analyzed.\n"
2486 << " " << AvgUninitVariablesPerFunction
2487 << " average variables per function.\n"
2488 << " " << MaxUninitAnalysisVariablesPerFunction
2489 << " max variables per function.\n"
2490 << " " << NumUninitAnalysisBlockVisits << " block visits.\n"
2491 << " " << AvgUninitBlockVisitsPerFunction
2492 << " average block visits per function.\n"
2493 << " " << MaxUninitAnalysisBlockVisitsPerFunction
2494 << " max block visits per function.\n";
2495 }
2496