//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines analysis_warnings::[Policy,Executor].
// Together they are used by Sema to issue warnings based on inexpensive
// static analysis algorithms in libAnalysis.
//
//===----------------------------------------------------------------------===//
15
#include "clang/Sema/AnalysisBasedWarnings.h"
#include "clang/Sema/ScopeInfo.h"
#include "clang/Sema/SemaInternal.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/ExprObjC.h"
#include "clang/AST/RecursiveASTVisitor.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtVisitor.h"
#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
#include "clang/Analysis/Analyses/ReachableCode.h"
#include "clang/Analysis/Analyses/ThreadSafety.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/CFGStmtMap.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Lex/Lexer.h"
#include "clang/Lex/Preprocessor.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/FoldingSet.h"
#include "llvm/ADT/ImmutableMap.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <deque>
#include <iterator>
#include <vector>

using namespace clang;
52
//===----------------------------------------------------------------------===//
// Unreachable code analysis.
//===----------------------------------------------------------------------===//
56
57 namespace {
58 class UnreachableCodeHandler : public reachable_code::Callback {
59 Sema &S;
60 public:
UnreachableCodeHandler(Sema & s)61 UnreachableCodeHandler(Sema &s) : S(s) {}
62
HandleUnreachable(SourceLocation L,SourceRange R1,SourceRange R2)63 void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
64 S.Diag(L, diag::warn_unreachable) << R1 << R2;
65 }
66 };
67 }
68
69 /// CheckUnreachable - Check for unreachable code.
CheckUnreachable(Sema & S,AnalysisDeclContext & AC)70 static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
71 UnreachableCodeHandler UC(S);
72 reachable_code::FindUnreachableCode(AC, UC);
73 }
74
//===----------------------------------------------------------------------===//
// Check for missing return value.
//===----------------------------------------------------------------------===//
78
/// Classifies how control can leave the body of a function, block, or lambda.
enum ControlFlowKind {
  UnknownFallThrough,       ///< No CFG was available; we cannot tell.
  NeverFallThrough,         ///< Never falls off the end, but may return.
  MaybeFallThrough,         ///< Might or might not fall off the end.
  AlwaysFallThrough,        ///< Always falls off the end.
  NeverFallThroughOrReturn  ///< Never falls off the end and never returns.
};
86
87 /// CheckFallThrough - Check that we don't fall off the end of a
88 /// Statement that should return a value.
89 ///
90 /// \returns AlwaysFallThrough iff we always fall off the end of the statement,
91 /// MaybeFallThrough iff we might or might not fall off the end,
92 /// NeverFallThroughOrReturn iff we never fall off the end of the statement or
93 /// return. We assume NeverFallThrough iff we never fall off the end of the
94 /// statement but we may return. We assume that functions not marked noreturn
95 /// will return.
CheckFallThrough(AnalysisDeclContext & AC)96 static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
97 CFG *cfg = AC.getCFG();
98 if (cfg == 0) return UnknownFallThrough;
99
100 // The CFG leaves in dead things, and we don't want the dead code paths to
101 // confuse us, so we mark all live things first.
102 llvm::BitVector live(cfg->getNumBlockIDs());
103 unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
104 live);
105
106 bool AddEHEdges = AC.getAddEHEdges();
107 if (!AddEHEdges && count != cfg->getNumBlockIDs())
108 // When there are things remaining dead, and we didn't add EH edges
109 // from CallExprs to the catch clauses, we have to go back and
110 // mark them as live.
111 for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
112 CFGBlock &b = **I;
113 if (!live[b.getBlockID()]) {
114 if (b.pred_begin() == b.pred_end()) {
115 if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
116 // When not adding EH edges from calls, catch clauses
117 // can otherwise seem dead. Avoid noting them as dead.
118 count += reachable_code::ScanReachableFromBlock(&b, live);
119 continue;
120 }
121 }
122 }
123
124 // Now we know what is live, we check the live precessors of the exit block
125 // and look for fall through paths, being careful to ignore normal returns,
126 // and exceptional paths.
127 bool HasLiveReturn = false;
128 bool HasFakeEdge = false;
129 bool HasPlainEdge = false;
130 bool HasAbnormalEdge = false;
131
132 // Ignore default cases that aren't likely to be reachable because all
133 // enums in a switch(X) have explicit case statements.
134 CFGBlock::FilterOptions FO;
135 FO.IgnoreDefaultsWithCoveredEnums = 1;
136
137 for (CFGBlock::filtered_pred_iterator
138 I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
139 const CFGBlock& B = **I;
140 if (!live[B.getBlockID()])
141 continue;
142
143 // Skip blocks which contain an element marked as no-return. They don't
144 // represent actually viable edges into the exit block, so mark them as
145 // abnormal.
146 if (B.hasNoReturnElement()) {
147 HasAbnormalEdge = true;
148 continue;
149 }
150
151 // Destructors can appear after the 'return' in the CFG. This is
152 // normal. We need to look pass the destructors for the return
153 // statement (if it exists).
154 CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
155
156 for ( ; ri != re ; ++ri)
157 if (isa<CFGStmt>(*ri))
158 break;
159
160 // No more CFGElements in the block?
161 if (ri == re) {
162 if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
163 HasAbnormalEdge = true;
164 continue;
165 }
166 // A labeled empty statement, or the entry block...
167 HasPlainEdge = true;
168 continue;
169 }
170
171 CFGStmt CS = cast<CFGStmt>(*ri);
172 const Stmt *S = CS.getStmt();
173 if (isa<ReturnStmt>(S)) {
174 HasLiveReturn = true;
175 continue;
176 }
177 if (isa<ObjCAtThrowStmt>(S)) {
178 HasFakeEdge = true;
179 continue;
180 }
181 if (isa<CXXThrowExpr>(S)) {
182 HasFakeEdge = true;
183 continue;
184 }
185 if (isa<MSAsmStmt>(S)) {
186 // TODO: Verify this is correct.
187 HasFakeEdge = true;
188 HasLiveReturn = true;
189 continue;
190 }
191 if (isa<CXXTryStmt>(S)) {
192 HasAbnormalEdge = true;
193 continue;
194 }
195 if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
196 == B.succ_end()) {
197 HasAbnormalEdge = true;
198 continue;
199 }
200
201 HasPlainEdge = true;
202 }
203 if (!HasPlainEdge) {
204 if (HasLiveReturn)
205 return NeverFallThrough;
206 return NeverFallThroughOrReturn;
207 }
208 if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
209 return MaybeFallThrough;
210 // This says AlwaysFallThrough for calls to functions that are not marked
211 // noreturn, that don't return. If people would like this warning to be more
212 // accurate, such functions should be marked as noreturn.
213 return AlwaysFallThrough;
214 }
215
216 namespace {
217
218 struct CheckFallThroughDiagnostics {
219 unsigned diag_MaybeFallThrough_HasNoReturn;
220 unsigned diag_MaybeFallThrough_ReturnsNonVoid;
221 unsigned diag_AlwaysFallThrough_HasNoReturn;
222 unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
223 unsigned diag_NeverFallThroughOrReturn;
224 enum { Function, Block, Lambda } funMode;
225 SourceLocation FuncLoc;
226
MakeForFunction__anon4f73e2630211::CheckFallThroughDiagnostics227 static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
228 CheckFallThroughDiagnostics D;
229 D.FuncLoc = Func->getLocation();
230 D.diag_MaybeFallThrough_HasNoReturn =
231 diag::warn_falloff_noreturn_function;
232 D.diag_MaybeFallThrough_ReturnsNonVoid =
233 diag::warn_maybe_falloff_nonvoid_function;
234 D.diag_AlwaysFallThrough_HasNoReturn =
235 diag::warn_falloff_noreturn_function;
236 D.diag_AlwaysFallThrough_ReturnsNonVoid =
237 diag::warn_falloff_nonvoid_function;
238
239 // Don't suggest that virtual functions be marked "noreturn", since they
240 // might be overridden by non-noreturn functions.
241 bool isVirtualMethod = false;
242 if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
243 isVirtualMethod = Method->isVirtual();
244
245 // Don't suggest that template instantiations be marked "noreturn"
246 bool isTemplateInstantiation = false;
247 if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
248 isTemplateInstantiation = Function->isTemplateInstantiation();
249
250 if (!isVirtualMethod && !isTemplateInstantiation)
251 D.diag_NeverFallThroughOrReturn =
252 diag::warn_suggest_noreturn_function;
253 else
254 D.diag_NeverFallThroughOrReturn = 0;
255
256 D.funMode = Function;
257 return D;
258 }
259
MakeForBlock__anon4f73e2630211::CheckFallThroughDiagnostics260 static CheckFallThroughDiagnostics MakeForBlock() {
261 CheckFallThroughDiagnostics D;
262 D.diag_MaybeFallThrough_HasNoReturn =
263 diag::err_noreturn_block_has_return_expr;
264 D.diag_MaybeFallThrough_ReturnsNonVoid =
265 diag::err_maybe_falloff_nonvoid_block;
266 D.diag_AlwaysFallThrough_HasNoReturn =
267 diag::err_noreturn_block_has_return_expr;
268 D.diag_AlwaysFallThrough_ReturnsNonVoid =
269 diag::err_falloff_nonvoid_block;
270 D.diag_NeverFallThroughOrReturn =
271 diag::warn_suggest_noreturn_block;
272 D.funMode = Block;
273 return D;
274 }
275
MakeForLambda__anon4f73e2630211::CheckFallThroughDiagnostics276 static CheckFallThroughDiagnostics MakeForLambda() {
277 CheckFallThroughDiagnostics D;
278 D.diag_MaybeFallThrough_HasNoReturn =
279 diag::err_noreturn_lambda_has_return_expr;
280 D.diag_MaybeFallThrough_ReturnsNonVoid =
281 diag::warn_maybe_falloff_nonvoid_lambda;
282 D.diag_AlwaysFallThrough_HasNoReturn =
283 diag::err_noreturn_lambda_has_return_expr;
284 D.diag_AlwaysFallThrough_ReturnsNonVoid =
285 diag::warn_falloff_nonvoid_lambda;
286 D.diag_NeverFallThroughOrReturn = 0;
287 D.funMode = Lambda;
288 return D;
289 }
290
checkDiagnostics__anon4f73e2630211::CheckFallThroughDiagnostics291 bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
292 bool HasNoReturn) const {
293 if (funMode == Function) {
294 return (ReturnsVoid ||
295 D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
296 FuncLoc) == DiagnosticsEngine::Ignored)
297 && (!HasNoReturn ||
298 D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
299 FuncLoc) == DiagnosticsEngine::Ignored)
300 && (!ReturnsVoid ||
301 D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
302 == DiagnosticsEngine::Ignored);
303 }
304
305 // For blocks / lambdas.
306 return ReturnsVoid && !HasNoReturn
307 && ((funMode == Lambda) ||
308 D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
309 == DiagnosticsEngine::Ignored);
310 }
311 };
312
313 }
314
315 /// CheckFallThroughForFunctionDef - Check that we don't fall off the end of a
316 /// function that should return a value. Check that we don't fall off the end
317 /// of a noreturn function. We assume that functions and blocks not marked
318 /// noreturn will return.
CheckFallThroughForBody(Sema & S,const Decl * D,const Stmt * Body,const BlockExpr * blkExpr,const CheckFallThroughDiagnostics & CD,AnalysisDeclContext & AC)319 static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
320 const BlockExpr *blkExpr,
321 const CheckFallThroughDiagnostics& CD,
322 AnalysisDeclContext &AC) {
323
324 bool ReturnsVoid = false;
325 bool HasNoReturn = false;
326
327 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
328 ReturnsVoid = FD->getResultType()->isVoidType();
329 HasNoReturn = FD->hasAttr<NoReturnAttr>() ||
330 FD->getType()->getAs<FunctionType>()->getNoReturnAttr();
331 }
332 else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
333 ReturnsVoid = MD->getResultType()->isVoidType();
334 HasNoReturn = MD->hasAttr<NoReturnAttr>();
335 }
336 else if (isa<BlockDecl>(D)) {
337 QualType BlockTy = blkExpr->getType();
338 if (const FunctionType *FT =
339 BlockTy->getPointeeType()->getAs<FunctionType>()) {
340 if (FT->getResultType()->isVoidType())
341 ReturnsVoid = true;
342 if (FT->getNoReturnAttr())
343 HasNoReturn = true;
344 }
345 }
346
347 DiagnosticsEngine &Diags = S.getDiagnostics();
348
349 // Short circuit for compilation speed.
350 if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
351 return;
352
353 // FIXME: Function try block
354 if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
355 switch (CheckFallThrough(AC)) {
356 case UnknownFallThrough:
357 break;
358
359 case MaybeFallThrough:
360 if (HasNoReturn)
361 S.Diag(Compound->getRBracLoc(),
362 CD.diag_MaybeFallThrough_HasNoReturn);
363 else if (!ReturnsVoid)
364 S.Diag(Compound->getRBracLoc(),
365 CD.diag_MaybeFallThrough_ReturnsNonVoid);
366 break;
367 case AlwaysFallThrough:
368 if (HasNoReturn)
369 S.Diag(Compound->getRBracLoc(),
370 CD.diag_AlwaysFallThrough_HasNoReturn);
371 else if (!ReturnsVoid)
372 S.Diag(Compound->getRBracLoc(),
373 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
374 break;
375 case NeverFallThroughOrReturn:
376 if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
377 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
378 S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
379 << 0 << FD;
380 } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
381 S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
382 << 1 << MD;
383 } else {
384 S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn);
385 }
386 }
387 break;
388 case NeverFallThrough:
389 break;
390 }
391 }
392 }
393
//===----------------------------------------------------------------------===//
// -Wuninitialized
//===----------------------------------------------------------------------===//
397
398 namespace {
399 /// ContainsReference - A visitor class to search for references to
400 /// a particular declaration (the needle) within any evaluated component of an
401 /// expression (recursively).
402 class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
403 bool FoundReference;
404 const DeclRefExpr *Needle;
405
406 public:
ContainsReference(ASTContext & Context,const DeclRefExpr * Needle)407 ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
408 : EvaluatedExprVisitor<ContainsReference>(Context),
409 FoundReference(false), Needle(Needle) {}
410
VisitExpr(Expr * E)411 void VisitExpr(Expr *E) {
412 // Stop evaluating if we already have a reference.
413 if (FoundReference)
414 return;
415
416 EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
417 }
418
VisitDeclRefExpr(DeclRefExpr * E)419 void VisitDeclRefExpr(DeclRefExpr *E) {
420 if (E == Needle)
421 FoundReference = true;
422 else
423 EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
424 }
425
doesContainReference() const426 bool doesContainReference() const { return FoundReference; }
427 };
428 }
429
SuggestInitializationFixit(Sema & S,const VarDecl * VD)430 static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
431 QualType VariableTy = VD->getType().getCanonicalType();
432 if (VariableTy->isBlockPointerType() &&
433 !VD->hasAttr<BlocksAttr>()) {
434 S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization) << VD->getDeclName()
435 << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
436 return true;
437 }
438
439 // Don't issue a fixit if there is already an initializer.
440 if (VD->getInit())
441 return false;
442
443 // Suggest possible initialization (if any).
444 std::string Init = S.getFixItZeroInitializerForType(VariableTy);
445 if (Init.empty())
446 return false;
447
448 // Don't suggest a fixit inside macros.
449 if (VD->getLocEnd().isMacroID())
450 return false;
451
452 SourceLocation Loc = S.PP.getLocForEndOfToken(VD->getLocEnd());
453
454 S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
455 << FixItHint::CreateInsertion(Loc, Init);
456 return true;
457 }
458
459 /// Create a fixit to remove an if-like statement, on the assumption that its
460 /// condition is CondVal.
CreateIfFixit(Sema & S,const Stmt * If,const Stmt * Then,const Stmt * Else,bool CondVal,FixItHint & Fixit1,FixItHint & Fixit2)461 static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
462 const Stmt *Else, bool CondVal,
463 FixItHint &Fixit1, FixItHint &Fixit2) {
464 if (CondVal) {
465 // If condition is always true, remove all but the 'then'.
466 Fixit1 = FixItHint::CreateRemoval(
467 CharSourceRange::getCharRange(If->getLocStart(),
468 Then->getLocStart()));
469 if (Else) {
470 SourceLocation ElseKwLoc = Lexer::getLocForEndOfToken(
471 Then->getLocEnd(), 0, S.getSourceManager(), S.getLangOpts());
472 Fixit2 = FixItHint::CreateRemoval(
473 SourceRange(ElseKwLoc, Else->getLocEnd()));
474 }
475 } else {
476 // If condition is always false, remove all but the 'else'.
477 if (Else)
478 Fixit1 = FixItHint::CreateRemoval(
479 CharSourceRange::getCharRange(If->getLocStart(),
480 Else->getLocStart()));
481 else
482 Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
483 }
484 }
485
486 /// DiagUninitUse -- Helper function to produce a diagnostic for an
487 /// uninitialized use of a variable.
DiagUninitUse(Sema & S,const VarDecl * VD,const UninitUse & Use,bool IsCapturedByBlock)488 static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
489 bool IsCapturedByBlock) {
490 bool Diagnosed = false;
491
492 // Diagnose each branch which leads to a sometimes-uninitialized use.
493 for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
494 I != E; ++I) {
495 assert(Use.getKind() == UninitUse::Sometimes);
496
497 const Expr *User = Use.getUser();
498 const Stmt *Term = I->Terminator;
499
500 // Information used when building the diagnostic.
501 unsigned DiagKind;
502 const char *Str;
503 SourceRange Range;
504
505 // FixIts to suppress the diagnosic by removing the dead condition.
506 // For all binary terminators, branch 0 is taken if the condition is true,
507 // and branch 1 is taken if the condition is false.
508 int RemoveDiagKind = -1;
509 const char *FixitStr =
510 S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
511 : (I->Output ? "1" : "0");
512 FixItHint Fixit1, Fixit2;
513
514 switch (Term->getStmtClass()) {
515 default:
516 // Don't know how to report this. Just fall back to 'may be used
517 // uninitialized'. This happens for range-based for, which the user
518 // can't explicitly fix.
519 // FIXME: This also happens if the first use of a variable is always
520 // uninitialized, eg "for (int n; n < 10; ++n)". We should report that
521 // with the 'is uninitialized' diagnostic.
522 continue;
523
524 // "condition is true / condition is false".
525 case Stmt::IfStmtClass: {
526 const IfStmt *IS = cast<IfStmt>(Term);
527 DiagKind = 0;
528 Str = "if";
529 Range = IS->getCond()->getSourceRange();
530 RemoveDiagKind = 0;
531 CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
532 I->Output, Fixit1, Fixit2);
533 break;
534 }
535 case Stmt::ConditionalOperatorClass: {
536 const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
537 DiagKind = 0;
538 Str = "?:";
539 Range = CO->getCond()->getSourceRange();
540 RemoveDiagKind = 0;
541 CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
542 I->Output, Fixit1, Fixit2);
543 break;
544 }
545 case Stmt::BinaryOperatorClass: {
546 const BinaryOperator *BO = cast<BinaryOperator>(Term);
547 if (!BO->isLogicalOp())
548 continue;
549 DiagKind = 0;
550 Str = BO->getOpcodeStr();
551 Range = BO->getLHS()->getSourceRange();
552 RemoveDiagKind = 0;
553 if ((BO->getOpcode() == BO_LAnd && I->Output) ||
554 (BO->getOpcode() == BO_LOr && !I->Output))
555 // true && y -> y, false || y -> y.
556 Fixit1 = FixItHint::CreateRemoval(SourceRange(BO->getLocStart(),
557 BO->getOperatorLoc()));
558 else
559 // false && y -> false, true || y -> true.
560 Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
561 break;
562 }
563
564 // "loop is entered / loop is exited".
565 case Stmt::WhileStmtClass:
566 DiagKind = 1;
567 Str = "while";
568 Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
569 RemoveDiagKind = 1;
570 Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
571 break;
572 case Stmt::ForStmtClass:
573 DiagKind = 1;
574 Str = "for";
575 Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
576 RemoveDiagKind = 1;
577 if (I->Output)
578 Fixit1 = FixItHint::CreateRemoval(Range);
579 else
580 Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
581 break;
582
583 // "condition is true / loop is exited".
584 case Stmt::DoStmtClass:
585 DiagKind = 2;
586 Str = "do";
587 Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
588 RemoveDiagKind = 1;
589 Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
590 break;
591
592 // "switch case is taken".
593 case Stmt::CaseStmtClass:
594 DiagKind = 3;
595 Str = "case";
596 Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
597 break;
598 case Stmt::DefaultStmtClass:
599 DiagKind = 3;
600 Str = "default";
601 Range = cast<DefaultStmt>(Term)->getDefaultLoc();
602 break;
603 }
604
605 S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
606 << VD->getDeclName() << IsCapturedByBlock << DiagKind
607 << Str << I->Output << Range;
608 S.Diag(User->getLocStart(), diag::note_uninit_var_use)
609 << IsCapturedByBlock << User->getSourceRange();
610 if (RemoveDiagKind != -1)
611 S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
612 << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;
613
614 Diagnosed = true;
615 }
616
617 if (!Diagnosed)
618 S.Diag(Use.getUser()->getLocStart(),
619 Use.getKind() == UninitUse::Always ? diag::warn_uninit_var
620 : diag::warn_maybe_uninit_var)
621 << VD->getDeclName() << IsCapturedByBlock
622 << Use.getUser()->getSourceRange();
623 }
624
625 /// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
626 /// uninitialized variable. This manages the different forms of diagnostic
627 /// emitted for particular types of uses. Returns true if the use was diagnosed
628 /// as a warning. If a particular use is one we omit warnings for, returns
629 /// false.
DiagnoseUninitializedUse(Sema & S,const VarDecl * VD,const UninitUse & Use,bool alwaysReportSelfInit=false)630 static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
631 const UninitUse &Use,
632 bool alwaysReportSelfInit = false) {
633
634 if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
635 // Inspect the initializer of the variable declaration which is
636 // being referenced prior to its initialization. We emit
637 // specialized diagnostics for self-initialization, and we
638 // specifically avoid warning about self references which take the
639 // form of:
640 //
641 // int x = x;
642 //
643 // This is used to indicate to GCC that 'x' is intentionally left
644 // uninitialized. Proven code paths which access 'x' in
645 // an uninitialized state after this will still warn.
646 if (const Expr *Initializer = VD->getInit()) {
647 if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
648 return false;
649
650 ContainsReference CR(S.Context, DRE);
651 CR.Visit(const_cast<Expr*>(Initializer));
652 if (CR.doesContainReference()) {
653 S.Diag(DRE->getLocStart(),
654 diag::warn_uninit_self_reference_in_init)
655 << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
656 return true;
657 }
658 }
659
660 DiagUninitUse(S, VD, Use, false);
661 } else {
662 const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
663 if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
664 S.Diag(BE->getLocStart(),
665 diag::warn_uninit_byref_blockvar_captured_by_block)
666 << VD->getDeclName();
667 else
668 DiagUninitUse(S, VD, Use, true);
669 }
670
671 // Report where the variable was declared when the use wasn't within
672 // the initializer of that declaration & we didn't already suggest
673 // an initialization fixit.
674 if (!SuggestInitializationFixit(S, VD))
675 S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
676 << VD->getDeclName();
677
678 return true;
679 }
680
681 namespace {
682 class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
683 public:
FallthroughMapper(Sema & S)684 FallthroughMapper(Sema &S)
685 : FoundSwitchStatements(false),
686 S(S) {
687 }
688
foundSwitchStatements() const689 bool foundSwitchStatements() const { return FoundSwitchStatements; }
690
markFallthroughVisited(const AttributedStmt * Stmt)691 void markFallthroughVisited(const AttributedStmt *Stmt) {
692 bool Found = FallthroughStmts.erase(Stmt);
693 assert(Found);
694 (void)Found;
695 }
696
697 typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;
698
getFallthroughStmts() const699 const AttrStmts &getFallthroughStmts() const {
700 return FallthroughStmts;
701 }
702
checkFallThroughIntoBlock(const CFGBlock & B,int & AnnotatedCnt)703 bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt) {
704 int UnannotatedCnt = 0;
705 AnnotatedCnt = 0;
706
707 std::deque<const CFGBlock*> BlockQueue;
708
709 std::copy(B.pred_begin(), B.pred_end(), std::back_inserter(BlockQueue));
710
711 while (!BlockQueue.empty()) {
712 const CFGBlock *P = BlockQueue.front();
713 BlockQueue.pop_front();
714
715 const Stmt *Term = P->getTerminator();
716 if (Term && isa<SwitchStmt>(Term))
717 continue; // Switch statement, good.
718
719 const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
720 if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
721 continue; // Previous case label has no statements, good.
722
723 if (P->pred_begin() == P->pred_end()) { // The block is unreachable.
724 // This only catches trivially unreachable blocks.
725 for (CFGBlock::const_iterator ElIt = P->begin(), ElEnd = P->end();
726 ElIt != ElEnd; ++ElIt) {
727 if (const CFGStmt *CS = ElIt->getAs<CFGStmt>()){
728 if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
729 S.Diag(AS->getLocStart(),
730 diag::warn_fallthrough_attr_unreachable);
731 markFallthroughVisited(AS);
732 ++AnnotatedCnt;
733 }
734 // Don't care about other unreachable statements.
735 }
736 }
737 // If there are no unreachable statements, this may be a special
738 // case in CFG:
739 // case X: {
740 // A a; // A has a destructor.
741 // break;
742 // }
743 // // <<<< This place is represented by a 'hanging' CFG block.
744 // case Y:
745 continue;
746 }
747
748 const Stmt *LastStmt = getLastStmt(*P);
749 if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
750 markFallthroughVisited(AS);
751 ++AnnotatedCnt;
752 continue; // Fallthrough annotation, good.
753 }
754
755 if (!LastStmt) { // This block contains no executable statements.
756 // Traverse its predecessors.
757 std::copy(P->pred_begin(), P->pred_end(),
758 std::back_inserter(BlockQueue));
759 continue;
760 }
761
762 ++UnannotatedCnt;
763 }
764 return !!UnannotatedCnt;
765 }
766
767 // RecursiveASTVisitor setup.
shouldWalkTypesOfTypeLocs() const768 bool shouldWalkTypesOfTypeLocs() const { return false; }
769
VisitAttributedStmt(AttributedStmt * S)770 bool VisitAttributedStmt(AttributedStmt *S) {
771 if (asFallThroughAttr(S))
772 FallthroughStmts.insert(S);
773 return true;
774 }
775
VisitSwitchStmt(SwitchStmt * S)776 bool VisitSwitchStmt(SwitchStmt *S) {
777 FoundSwitchStatements = true;
778 return true;
779 }
780
781 private:
782
asFallThroughAttr(const Stmt * S)783 static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
784 if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
785 if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
786 return AS;
787 }
788 return 0;
789 }
790
getLastStmt(const CFGBlock & B)791 static const Stmt *getLastStmt(const CFGBlock &B) {
792 if (const Stmt *Term = B.getTerminator())
793 return Term;
794 for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
795 ElemEnd = B.rend();
796 ElemIt != ElemEnd; ++ElemIt) {
797 if (const CFGStmt *CS = ElemIt->getAs<CFGStmt>())
798 return CS->getStmt();
799 }
800 // Workaround to detect a statement thrown out by CFGBuilder:
801 // case X: {} case Y:
802 // case X: ; case Y:
803 if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
804 if (!isa<SwitchCase>(SW->getSubStmt()))
805 return SW->getSubStmt();
806
807 return 0;
808 }
809
810 bool FoundSwitchStatements;
811 AttrStmts FallthroughStmts;
812 Sema &S;
813 };
814 }
815
DiagnoseSwitchLabelsFallthrough(Sema & S,AnalysisDeclContext & AC,bool PerFunction)816 static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
817 bool PerFunction) {
818 FallthroughMapper FM(S);
819 FM.TraverseStmt(AC.getBody());
820
821 if (!FM.foundSwitchStatements())
822 return;
823
824 if (PerFunction && FM.getFallthroughStmts().empty())
825 return;
826
827 CFG *Cfg = AC.getCFG();
828
829 if (!Cfg)
830 return;
831
832 int AnnotatedCnt;
833
834 for (CFG::reverse_iterator I = Cfg->rbegin(), E = Cfg->rend(); I != E; ++I) {
835 const CFGBlock &B = **I;
836 const Stmt *Label = B.getLabel();
837
838 if (!Label || !isa<SwitchCase>(Label))
839 continue;
840
841 if (!FM.checkFallThroughIntoBlock(B, AnnotatedCnt))
842 continue;
843
844 S.Diag(Label->getLocStart(),
845 PerFunction ? diag::warn_unannotated_fallthrough_per_function
846 : diag::warn_unannotated_fallthrough);
847
848 if (!AnnotatedCnt) {
849 SourceLocation L = Label->getLocStart();
850 if (L.isMacroID())
851 continue;
852 if (S.getLangOpts().CPlusPlus0x) {
853 const Stmt *Term = B.getTerminator();
854 if (!(B.empty() && Term && isa<BreakStmt>(Term))) {
855 S.Diag(L, diag::note_insert_fallthrough_fixit) <<
856 FixItHint::CreateInsertion(L, "[[clang::fallthrough]]; ");
857 }
858 }
859 S.Diag(L, diag::note_insert_break_fixit) <<
860 FixItHint::CreateInsertion(L, "break; ");
861 }
862 }
863
864 const FallthroughMapper::AttrStmts &Fallthroughs = FM.getFallthroughStmts();
865 for (FallthroughMapper::AttrStmts::const_iterator I = Fallthroughs.begin(),
866 E = Fallthroughs.end();
867 I != E; ++I) {
868 S.Diag((*I)->getLocStart(), diag::warn_fallthrough_attr_invalid_placement);
869 }
870
871 }
872
873 namespace {
874 struct SLocSort {
operator ()__anon4f73e2630611::SLocSort875 bool operator()(const UninitUse &a, const UninitUse &b) {
876 // Prefer a more confident report over a less confident one.
877 if (a.getKind() != b.getKind())
878 return a.getKind() > b.getKind();
879 SourceLocation aLoc = a.getUser()->getLocStart();
880 SourceLocation bLoc = b.getUser()->getLocStart();
881 return aLoc.getRawEncoding() < bLoc.getRawEncoding();
882 }
883 };
884
885 class UninitValsDiagReporter : public UninitVariablesHandler {
886 Sema &S;
887 typedef SmallVector<UninitUse, 2> UsesVec;
888 typedef llvm::DenseMap<const VarDecl *, std::pair<UsesVec*, bool> > UsesMap;
889 UsesMap *uses;
890
891 public:
UninitValsDiagReporter(Sema & S)892 UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
~UninitValsDiagReporter()893 ~UninitValsDiagReporter() {
894 flushDiagnostics();
895 }
896
getUses(const VarDecl * vd)897 std::pair<UsesVec*, bool> &getUses(const VarDecl *vd) {
898 if (!uses)
899 uses = new UsesMap();
900
901 UsesMap::mapped_type &V = (*uses)[vd];
902 UsesVec *&vec = V.first;
903 if (!vec)
904 vec = new UsesVec();
905
906 return V;
907 }
908
handleUseOfUninitVariable(const VarDecl * vd,const UninitUse & use)909 void handleUseOfUninitVariable(const VarDecl *vd, const UninitUse &use) {
910 getUses(vd).first->push_back(use);
911 }
912
handleSelfInit(const VarDecl * vd)913 void handleSelfInit(const VarDecl *vd) {
914 getUses(vd).second = true;
915 }
916
flushDiagnostics()917 void flushDiagnostics() {
918 if (!uses)
919 return;
920
921 // FIXME: This iteration order, and thus the resulting diagnostic order,
922 // is nondeterministic.
923 for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
924 const VarDecl *vd = i->first;
925 const UsesMap::mapped_type &V = i->second;
926
927 UsesVec *vec = V.first;
928 bool hasSelfInit = V.second;
929
930 // Specially handle the case where we have uses of an uninitialized
931 // variable, but the root cause is an idiomatic self-init. We want
932 // to report the diagnostic at the self-init since that is the root cause.
933 if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
934 DiagnoseUninitializedUse(S, vd,
935 UninitUse(vd->getInit()->IgnoreParenCasts(),
936 /* isAlwaysUninit */ true),
937 /* alwaysReportSelfInit */ true);
938 else {
939 // Sort the uses by their SourceLocations. While not strictly
940 // guaranteed to produce them in line/column order, this will provide
941 // a stable ordering.
942 std::sort(vec->begin(), vec->end(), SLocSort());
943
944 for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
945 ++vi) {
946 // If we have self-init, downgrade all uses to 'may be uninitialized'.
947 UninitUse Use = hasSelfInit ? UninitUse(vi->getUser(), false) : *vi;
948
949 if (DiagnoseUninitializedUse(S, vd, Use))
950 // Skip further diagnostics for this variable. We try to warn only
951 // on the first point at which a variable is used uninitialized.
952 break;
953 }
954 }
955
956 // Release the uses vector.
957 delete vec;
958 }
959 delete uses;
960 }
961
962 private:
hasAlwaysUninitializedUse(const UsesVec * vec)963 static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
964 for (UsesVec::const_iterator i = vec->begin(), e = vec->end(); i != e; ++i) {
965 if (i->getKind() == UninitUse::Always) {
966 return true;
967 }
968 }
969 return false;
970 }
971 };
972 }
973
974
975 //===----------------------------------------------------------------------===//
976 // -Wthread-safety
977 //===----------------------------------------------------------------------===//
978 namespace clang {
979 namespace thread_safety {
980 typedef llvm::SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
981 typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
982 typedef std::list<DelayedDiag> DiagList;
983
984 struct SortDiagBySourceLocation {
985 SourceManager &SM;
SortDiagBySourceLocationclang::thread_safety::SortDiagBySourceLocation986 SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
987
operator ()clang::thread_safety::SortDiagBySourceLocation988 bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
989 // Although this call will be slow, this is only called when outputting
990 // multiple warnings.
991 return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
992 }
993 };
994
995 namespace {
/// Handler for -Wthread-safety: buffers every diagnostic (with its
/// optional attached notes) in Warnings so that emitDiagnostics() can
/// sort them by source location and emit them in deterministic order.
996 class ThreadSafetyReporter : public clang::thread_safety::ThreadSafetyHandler {
997 Sema &S;
998 DiagList Warnings;
999 SourceLocation FunLocation, FunEndLocation;
1000
1001 // Helper functions
// Buffer a single-note-free warning DiagID << LockName at Loc, falling
// back to the function's location when the analysis produced an invalid Loc.
warnLockMismatch(unsigned DiagID,Name LockName,SourceLocation Loc)1002 void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) {
1003 // Gracefully handle rare cases when the analysis can't get a more
1004 // precise source location.
1005 if (!Loc.isValid())
1006 Loc = FunLocation;
1007 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << LockName);
1008 Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1009 }
1010
1011 public:
// FL/FEL: fallback locations (function start/end) used when a handler is
// given an invalid source location.
ThreadSafetyReporter(Sema & S,SourceLocation FL,SourceLocation FEL)1012 ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
1013 : S(S), FunLocation(FL), FunEndLocation(FEL) {}
1014
1015 /// \brief Emit all buffered diagnostics in order of source location.
1016 /// We need to output diagnostics produced while iterating through
1017 /// the lockset in deterministic order, so this function orders diagnostics
1018 /// and outputs them.
emitDiagnostics()1019 void emitDiagnostics() {
1020 Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
1021 for (DiagList::iterator I = Warnings.begin(), E = Warnings.end();
1022 I != E; ++I) {
1023 S.Diag(I->first.first, I->first.second);
1024 const OptionalNotes &Notes = I->second;
// Emit the notes attached to this warning immediately after it.
1025 for (unsigned NoteI = 0, NoteN = Notes.size(); NoteI != NoteN; ++NoteI)
1026 S.Diag(Notes[NoteI].first, Notes[NoteI].second);
1027 }
1028 }
1029
// A lock expression was too complex for the analysis to resolve.
handleInvalidLockExp(SourceLocation Loc)1030 void handleInvalidLockExp(SourceLocation Loc) {
1031 PartialDiagnosticAt Warning(Loc,
1032 S.PDiag(diag::warn_cannot_resolve_lock) << Loc);
1033 Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1034 }
// An unlock of LockName with no matching lock held.
handleUnmatchedUnlock(Name LockName,SourceLocation Loc)1035 void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) {
1036 warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc);
1037 }
1038
// LockName was acquired while already held.
handleDoubleLock(Name LockName,SourceLocation Loc)1039 void handleDoubleLock(Name LockName, SourceLocation Loc) {
1040 warnLockMismatch(diag::warn_double_lock, LockName, Loc);
1041 }
1042
// A mutex's held/not-held state at the end of a scope, loop iteration, or
// function did not match expectations; LEK selects the exact diagnostic.
// A note points at the location where the lock was acquired.
handleMutexHeldEndOfScope(Name LockName,SourceLocation LocLocked,SourceLocation LocEndOfScope,LockErrorKind LEK)1043 void handleMutexHeldEndOfScope(Name LockName, SourceLocation LocLocked,
1044 SourceLocation LocEndOfScope,
1045 LockErrorKind LEK){
1046 unsigned DiagID = 0;
1047 switch (LEK) {
1048 case LEK_LockedSomePredecessors:
1049 DiagID = diag::warn_lock_some_predecessors;
1050 break;
1051 case LEK_LockedSomeLoopIterations:
1052 DiagID = diag::warn_expecting_lock_held_on_loop;
1053 break;
1054 case LEK_LockedAtEndOfFunction:
1055 DiagID = diag::warn_no_unlock;
1056 break;
1057 case LEK_NotLockedAtEndOfFunction:
1058 DiagID = diag::warn_expecting_locked;
1059 break;
1060 }
1061 if (LocEndOfScope.isInvalid())
1062 LocEndOfScope = FunEndLocation;
1063
1064 PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << LockName);
1065 PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here));
1066 Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
1067 }
1068
1069
// LockName is held exclusively at Loc1 but shared at Loc2; warn at the
// first and attach a note at the second.
handleExclusiveAndShared(Name LockName,SourceLocation Loc1,SourceLocation Loc2)1070 void handleExclusiveAndShared(Name LockName, SourceLocation Loc1,
1071 SourceLocation Loc2) {
1072 PartialDiagnosticAt Warning(
1073 Loc1, S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName);
1074 PartialDiagnosticAt Note(
1075 Loc2, S.PDiag(diag::note_lock_exclusive_and_shared) << LockName);
1076 Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
1077 }
1078
// A guarded variable was accessed/dereferenced with no lock of any kind
// held.  Only variable operations are expected here (see the assert).
handleNoMutexHeld(const NamedDecl * D,ProtectedOperationKind POK,AccessKind AK,SourceLocation Loc)1079 void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
1080 AccessKind AK, SourceLocation Loc) {
1081 assert((POK == POK_VarAccess || POK == POK_VarDereference)
1082 && "Only works for variables");
1083 unsigned DiagID = POK == POK_VarAccess?
1084 diag::warn_variable_requires_any_lock:
1085 diag::warn_var_deref_requires_any_lock;
1086 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1087 << D->getName() << getLockKindFromAccessKind(AK))
1088 Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1089 }
1090
// The specific mutex LockName required for this operation was not held.
// If the analysis found a near-match mutex (PossibleMatch), use the
// "precise" diagnostic variants and attach a note pointing at the match.
handleMutexNotHeld(const NamedDecl * D,ProtectedOperationKind POK,Name LockName,LockKind LK,SourceLocation Loc,Name * PossibleMatch)1091 void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK,
1092 Name LockName, LockKind LK, SourceLocation Loc,
1093 Name *PossibleMatch) {
1094 unsigned DiagID = 0;
1095 if (PossibleMatch) {
1096 switch (POK) {
1097 case POK_VarAccess:
1098 DiagID = diag::warn_variable_requires_lock_precise;
1099 break;
1100 case POK_VarDereference:
1101 DiagID = diag::warn_var_deref_requires_lock_precise;
1102 break;
1103 case POK_FunctionCall:
1104 DiagID = diag::warn_fun_requires_lock_precise;
1105 break;
1106 }
1107 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1108 << D->getName() << LockName << LK);
1109 PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
1110 << *PossibleMatch);
1111 Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
1112 } else {
1113 switch (POK) {
1114 case POK_VarAccess:
1115 DiagID = diag::warn_variable_requires_lock;
1116 break;
1117 case POK_VarDereference:
1118 DiagID = diag::warn_var_deref_requires_lock;
1119 break;
1120 case POK_FunctionCall:
1121 DiagID = diag::warn_fun_requires_lock;
1122 break;
1123 }
1124 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1125 << D->getName() << LockName << LK);
1126 Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1127 }
1128 }
1129
// FunName was called while holding LockName, which it excludes
// (LOCKS_EXCLUDED).
handleFunExcludesLock(Name FunName,Name LockName,SourceLocation Loc)1130 void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) {
1131 PartialDiagnosticAt Warning(Loc,
1132 S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName);
1133 Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1134 }
1135 };
1136 }
1137 }
1138 }
1139
1140 //===----------------------------------------------------------------------===//
1141 // AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
1142 // warnings on a function, method, or block.
1143 //===----------------------------------------------------------------------===//
1144
Policy()1145 clang::sema::AnalysisBasedWarnings::Policy::Policy() {
1146 enableCheckFallThrough = 1;
1147 enableCheckUnreachable = 0;
1148 enableThreadSafetyAnalysis = 0;
1149 }
1150
AnalysisBasedWarnings(Sema & s)1151 clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
1152 : S(s),
1153 NumFunctionsAnalyzed(0),
1154 NumFunctionsWithBadCFGs(0),
1155 NumCFGBlocks(0),
1156 MaxCFGBlocksPerFunction(0),
1157 NumUninitAnalysisFunctions(0),
1158 NumUninitAnalysisVariables(0),
1159 MaxUninitAnalysisVariablesPerFunction(0),
1160 NumUninitAnalysisBlockVisits(0),
1161 MaxUninitAnalysisBlockVisitsPerFunction(0) {
1162 DiagnosticsEngine &D = S.getDiagnostics();
1163 DefaultPolicy.enableCheckUnreachable = (unsigned)
1164 (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
1165 DiagnosticsEngine::Ignored);
1166 DefaultPolicy.enableThreadSafetyAnalysis = (unsigned)
1167 (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) !=
1168 DiagnosticsEngine::Ignored);
1169
1170 }
1171
flushDiagnostics(Sema & S,sema::FunctionScopeInfo * fscope)1172 static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
1173 for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
1174 i = fscope->PossiblyUnreachableDiags.begin(),
1175 e = fscope->PossiblyUnreachableDiags.end();
1176 i != e; ++i) {
1177 const sema::PossiblyUnreachableDiag &D = *i;
1178 S.Diag(D.Loc, D.PD);
1179 }
1180 }
1181
/// Entry point called by Sema after a function/method/block body has been
/// parsed: runs the inexpensive analysis-based warnings (fall-through,
/// unreachable code, thread safety, uninitialized variables, switch
/// fall-through annotations) selected by policy P, and flushes/emits the
/// delayed "possibly unreachable" diagnostics buffered in fscope.
/// NOTE: the statement order below matters — CFG build options must be
/// fully configured before the first AC.getCFG() call constructs the CFG.
1182 void clang::sema::
IssueWarnings(sema::AnalysisBasedWarnings::Policy P,sema::FunctionScopeInfo * fscope,const Decl * D,const BlockExpr * blkExpr)1183 AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
1184 sema::FunctionScopeInfo *fscope,
1185 const Decl *D, const BlockExpr *blkExpr) {
1186
1187 // We avoid doing analysis-based warnings when there are errors for
1188 // two reasons:
1189 // (1) The CFGs often can't be constructed (if the body is invalid), so
1190 // don't bother trying.
1191 // (2) The code already has problems; running the analysis just takes more
1192 // time.
1193 DiagnosticsEngine &Diags = S.getDiagnostics();
1194
1195 // Do not do any analysis for declarations in system headers if we are
1196 // going to just ignore them.
1197 if (Diags.getSuppressSystemWarnings() &&
1198 S.SourceMgr.isInSystemHeader(D->getLocation()))
1199 return;
1200
1201 // For code in dependent contexts, we'll do this at instantiation time.
1202 if (cast<DeclContext>(D)->isDependentContext())
1203 return;
1204
1205 if (Diags.hasErrorOccurred() || Diags.hasFatalErrorOccurred()) {
1206 // Flush out any possibly unreachable diagnostics.
1207 flushDiagnostics(S, fscope);
1208 return;
1209 }
1210
1211 const Stmt *Body = D->getBody();
1212 assert(Body);
1213
// Stand-alone analysis context (no manager), scoped to this declaration.
1214 AnalysisDeclContext AC(/* AnalysisDeclContextManager */ 0, D);
1215
1216 // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
1217 // explosion for destructors that can result and the compile time hit.
1218 AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
1219 AC.getCFGBuildOptions().AddEHEdges = false;
1220 AC.getCFGBuildOptions().AddInitializers = true;
1221 AC.getCFGBuildOptions().AddImplicitDtors = true;
1222 AC.getCFGBuildOptions().AddTemporaryDtors = true;
1223
1224 // Force that certain expressions appear as CFGElements in the CFG. This
1225 // is used to speed up various analyses.
1226 // FIXME: This isn't the right factoring. This is here for initial
1227 // prototyping, but we need a way for analyses to say what expressions they
1228 // expect to always be CFGElements and then fill in the BuildOptions
1229 // appropriately. This is essentially a layering violation.
1230 if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis) {
1231 // Unreachable code analysis and thread safety require a linearized CFG.
1232 AC.getCFGBuildOptions().setAllAlwaysAdd();
1233 }
1234 else {
1235 AC.getCFGBuildOptions()
1236 .setAlwaysAdd(Stmt::BinaryOperatorClass)
1237 .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
1238 .setAlwaysAdd(Stmt::BlockExprClass)
1239 .setAlwaysAdd(Stmt::CStyleCastExprClass)
1240 .setAlwaysAdd(Stmt::DeclRefExprClass)
1241 .setAlwaysAdd(Stmt::ImplicitCastExprClass)
1242 .setAlwaysAdd(Stmt::UnaryOperatorClass)
1243 .setAlwaysAdd(Stmt::AttributedStmtClass);
1244 }
1245
1246 // Construct the analysis context with the specified CFG build options.
1247
1248 // Emit delayed diagnostics.
1249 if (!fscope->PossiblyUnreachableDiags.empty()) {
1250 bool analyzed = false;
1251
1252 // Register the expressions with the CFGBuilder.
// Must happen before AC.getCFG() below so the CFG contains blocks for them.
1253 for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
1254 i = fscope->PossiblyUnreachableDiags.begin(),
1255 e = fscope->PossiblyUnreachableDiags.end();
1256 i != e; ++i) {
1257 if (const Stmt *stmt = i->stmt)
1258 AC.registerForcedBlockExpression(stmt);
1259 }
1260
// getCFG() builds the CFG on demand; a null result means construction failed.
1261 if (AC.getCFG()) {
1262 analyzed = true;
1263 for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
1264 i = fscope->PossiblyUnreachableDiags.begin(),
1265 e = fscope->PossiblyUnreachableDiags.end();
1266 i != e; ++i)
1267 {
1268 const sema::PossiblyUnreachableDiag &D = *i;
1269 bool processed = false;
1270 if (const Stmt *stmt = i->stmt) {
1271 const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
1272 CFGReverseBlockReachabilityAnalysis *cra =
1273 AC.getCFGReachablityAnalysis();
1274 // FIXME: We should be able to assert that block is non-null, but
1275 // the CFG analysis can skip potentially-evaluated expressions in
1276 // edge cases; see test/Sema/vla-2.c.
1277 if (block && cra) {
1278 // Can this block be reached from the entrance?
1279 if (cra->isReachable(&AC.getCFG()->getEntry(), block))
1280 S.Diag(D.Loc, D.PD);
// Mark handled even when unreachable: the diagnostic is deliberately dropped.
1281 processed = true;
1282 }
1283 }
1284 if (!processed) {
1285 // Emit the warning anyway if we cannot map to a basic block.
1286 S.Diag(D.Loc, D.PD);
1287 }
1288 }
1289 }
1290
// CFG construction failed: fall back to emitting everything unconditionally.
1291 if (!analyzed)
1292 flushDiagnostics(S, fscope);
1293 }
1294
1295
1296 // Warning: check missing 'return'
// Pick the diagnostic set matching the kind of body: block, lambda
// call operator, or ordinary function.
1297 if (P.enableCheckFallThrough) {
1298 const CheckFallThroughDiagnostics &CD =
1299 (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
1300 : (isa<CXXMethodDecl>(D) &&
1301 cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
1302 cast<CXXMethodDecl>(D)->getParent()->isLambda())
1303 ? CheckFallThroughDiagnostics::MakeForLambda()
1304 : CheckFallThroughDiagnostics::MakeForFunction(D));
1305 CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
1306 }
1307
1308 // Warning: check for unreachable code
1309 if (P.enableCheckUnreachable) {
1310 // Only check for unreachable code on non-template instantiations.
1311 // Different template instantiations can effectively change the control-flow
1312 // and it is very difficult to prove that a snippet of code in a template
1313 // is unreachable for all instantiations.
1314 bool isTemplateInstantiation = false;
1315 if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
1316 isTemplateInstantiation = Function->isTemplateInstantiation();
1317 if (!isTemplateInstantiation)
1318 CheckUnreachable(S, AC);
1319 }
1320
1321 // Check for thread safety violations
1322 if (P.enableThreadSafetyAnalysis) {
1323 SourceLocation FL = AC.getDecl()->getLocation();
1324 SourceLocation FEL = AC.getDecl()->getLocEnd();
1325 thread_safety::ThreadSafetyReporter Reporter(S, FL, FEL);
1326 thread_safety::runThreadSafetyAnalysis(AC, Reporter);
1327 Reporter.emitDiagnostics();
1328 }
1329
// Run the uninitialized-values analysis if any of its three diagnostics
// (-Wuninitialized and its "sometimes"/"maybe" variants) is enabled.
1330 if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
1331 != DiagnosticsEngine::Ignored ||
1332 Diags.getDiagnosticLevel(diag::warn_sometimes_uninit_var,D->getLocStart())
1333 != DiagnosticsEngine::Ignored ||
1334 Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
1335 != DiagnosticsEngine::Ignored) {
1336 if (CFG *cfg = AC.getCFG()) {
1337 UninitValsDiagReporter reporter(S);
1338 UninitVariablesAnalysisStats stats;
1339 std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
1340 runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
1341 reporter, stats);
1342
// Accumulate per-function statistics when -print-stats is active.
1343 if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
1344 ++NumUninitAnalysisFunctions;
1345 NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
1346 NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
1347 MaxUninitAnalysisVariablesPerFunction =
1348 std::max(MaxUninitAnalysisVariablesPerFunction,
1349 stats.NumVariablesAnalyzed);
1350 MaxUninitAnalysisBlockVisitsPerFunction =
1351 std::max(MaxUninitAnalysisBlockVisitsPerFunction,
1352 stats.NumBlockVisits);
1353 }
1354 }
1355 }
1356
// Two flavors of the fall-through annotation warning: the full one, and
// a per-function one that only applies where [[clang::fallthrough]] is
// already used (the flag passed to DiagnoseSwitchLabelsFallthrough).
1357 bool FallThroughDiagFull =
1358 Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough,
1359 D->getLocStart()) != DiagnosticsEngine::Ignored;
1360 bool FallThroughDiagPerFunction =
1361 Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough_per_function,
1362 D->getLocStart()) != DiagnosticsEngine::Ignored;
1363 if (FallThroughDiagFull || FallThroughDiagPerFunction) {
1364 DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
1365 }
1366
1367 // Collect statistics about the CFG if it was built.
1368 if (S.CollectStats && AC.isCFGBuilt()) {
1369 ++NumFunctionsAnalyzed;
1370 if (CFG *cfg = AC.getCFG()) {
1371 // If we successfully built a CFG for this context, record some more
1372 // detail information about it.
1373 NumCFGBlocks += cfg->getNumBlockIDs();
1374 MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
1375 cfg->getNumBlockIDs());
1376 } else {
1377 ++NumFunctionsWithBadCFGs;
1378 }
1379 }
1380 }
1381
PrintStats() const1382 void clang::sema::AnalysisBasedWarnings::PrintStats() const {
1383 llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
1384
1385 unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
1386 unsigned AvgCFGBlocksPerFunction =
1387 !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
1388 llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
1389 << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
1390 << " " << NumCFGBlocks << " CFG blocks built.\n"
1391 << " " << AvgCFGBlocksPerFunction
1392 << " average CFG blocks per function.\n"
1393 << " " << MaxCFGBlocksPerFunction
1394 << " max CFG blocks per function.\n";
1395
1396 unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
1397 : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
1398 unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
1399 : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
1400 llvm::errs() << NumUninitAnalysisFunctions
1401 << " functions analyzed for uninitialiazed variables\n"
1402 << " " << NumUninitAnalysisVariables << " variables analyzed.\n"
1403 << " " << AvgUninitVariablesPerFunction
1404 << " average variables per function.\n"
1405 << " " << MaxUninitAnalysisVariablesPerFunction
1406 << " max variables per function.\n"
1407 << " " << NumUninitAnalysisBlockVisits << " block visits.\n"
1408 << " " << AvgUninitBlockVisitsPerFunction
1409 << " average block visits per function.\n"
1410 << " " << MaxUninitAnalysisBlockVisitsPerFunction
1411 << " max block visits per function.\n";
1412 }
1413