Fix the memory leak of FloatingLiteral/IntegerLiteral.
[clang.git] / lib / Sema / JumpDiagnostics.cpp
blob b23f615af7b75aa9365d2e74210c3590434349ab
1 //===--- JumpDiagnostics.cpp - Analyze Jump Targets for VLA issues --------===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file implements the JumpScopeChecker class, which is used to diagnose
11 // jumps that enter a VLA scope in an invalid way.
13 //===----------------------------------------------------------------------===//
15 #include "clang/Sema/SemaInternal.h"
16 #include "clang/AST/DeclCXX.h"
17 #include "clang/AST/Expr.h"
18 #include "clang/AST/StmtObjC.h"
19 #include "clang/AST/StmtCXX.h"
20 #include "llvm/ADT/BitVector.h"
21 using namespace clang;
23 namespace {
25 /// JumpScopeChecker - This object is used by Sema to diagnose invalid jumps
26 /// into VLA and other protected scopes. For example, this rejects:
27 /// goto L;
28 /// int a[n];
29 /// L:
30 ///
31 class JumpScopeChecker {
32 Sema &S;
34 /// GotoScope - This is a record that we use to keep track of all of the
35 /// scopes that are introduced by VLAs and other things that scope jumps like
36 /// gotos. This scope tree has nothing to do with the source scope tree,
37 /// because you can have multiple VLA scopes per compound statement, and most
38 /// compound statements don't introduce any scopes.
39 struct GotoScope {
40 /// ParentScope - The index in ScopeMap of the parent scope. This is 0 for
41 /// the parent scope is the function body.
42 unsigned ParentScope;
44 /// InDiag - The diagnostic to emit if there is a jump into this scope.
45 unsigned InDiag;
47 /// OutDiag - The diagnostic to emit if there is an indirect jump out
48 /// of this scope. Direct jumps always clean up their current scope
49 /// in an orderly way.
50 unsigned OutDiag;
52 /// Loc - Location to emit the diagnostic.
53 SourceLocation Loc;
55 GotoScope(unsigned parentScope, unsigned InDiag, unsigned OutDiag,
56 SourceLocation L)
57 : ParentScope(parentScope), InDiag(InDiag), OutDiag(OutDiag), Loc(L) {}
60 llvm::SmallVector<GotoScope, 48> Scopes;
61 llvm::DenseMap<Stmt*, unsigned> LabelAndGotoScopes;
62 llvm::SmallVector<Stmt*, 16> Jumps;
64 llvm::SmallVector<IndirectGotoStmt*, 4> IndirectJumps;
65 llvm::SmallVector<LabelStmt*, 4> IndirectJumpTargets;
66 public:
67 JumpScopeChecker(Stmt *Body, Sema &S);
68 private:
69 void BuildScopeInformation(Decl *D, unsigned &ParentScope);
70 void BuildScopeInformation(Stmt *S, unsigned ParentScope);
71 void VerifyJumps();
72 void VerifyIndirectJumps();
73 void DiagnoseIndirectJump(IndirectGotoStmt *IG, unsigned IGScope,
74 LabelStmt *Target, unsigned TargetScope);
75 void CheckJump(Stmt *From, Stmt *To,
76 SourceLocation DiagLoc, unsigned JumpDiag);
78 unsigned GetDeepestCommonScope(unsigned A, unsigned B);
80 } // end anonymous namespace
83 JumpScopeChecker::JumpScopeChecker(Stmt *Body, Sema &s) : S(s) {
84 // Add a scope entry for function scope.
85 Scopes.push_back(GotoScope(~0U, ~0U, ~0U, SourceLocation()));
87 // Build information for the top level compound statement, so that we have a
88 // defined scope record for every "goto" and label.
89 BuildScopeInformation(Body, 0);
91 // Check that all jumps we saw are kosher.
92 VerifyJumps();
93 VerifyIndirectJumps();
96 /// GetDeepestCommonScope - Finds the innermost scope enclosing the
97 /// two scopes.
98 unsigned JumpScopeChecker::GetDeepestCommonScope(unsigned A, unsigned B) {
99 while (A != B) {
100 // Inner scopes are created after outer scopes and therefore have
101 // higher indices.
102 if (A < B) {
103 assert(Scopes[B].ParentScope < B);
104 B = Scopes[B].ParentScope;
105 } else {
106 assert(Scopes[A].ParentScope < A);
107 A = Scopes[A].ParentScope;
110 return A;
113 /// GetDiagForGotoScopeDecl - If this decl induces a new goto scope, return a
114 /// diagnostic that should be emitted if control goes over it. If not, return 0.
115 static std::pair<unsigned,unsigned>
116 GetDiagForGotoScopeDecl(const Decl *D, bool isCPlusPlus) {
117 if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
118 unsigned InDiag = 0, OutDiag = 0;
119 if (VD->getType()->isVariablyModifiedType())
120 InDiag = diag::note_protected_by_vla;
122 if (VD->hasAttr<BlocksAttr>()) {
123 InDiag = diag::note_protected_by___block;
124 OutDiag = diag::note_exits___block;
125 } else if (VD->hasAttr<CleanupAttr>()) {
126 InDiag = diag::note_protected_by_cleanup;
127 OutDiag = diag::note_exits_cleanup;
128 } else if (isCPlusPlus) {
129 // FIXME: In C++0x, we have to check more conditions than "did we
130 // just give it an initializer?". See 6.7p3.
131 if (VD->hasLocalStorage() && VD->hasInit())
132 InDiag = diag::note_protected_by_variable_init;
134 CanQualType T = VD->getType()->getCanonicalTypeUnqualified();
135 if (!T->isDependentType()) {
136 while (CanQual<ArrayType> AT = T->getAs<ArrayType>())
137 T = AT->getElementType();
138 if (CanQual<RecordType> RT = T->getAs<RecordType>())
139 if (!cast<CXXRecordDecl>(RT->getDecl())->hasTrivialDestructor())
140 OutDiag = diag::note_exits_dtor;
144 return std::make_pair(InDiag, OutDiag);
147 if (const TypedefDecl *TD = dyn_cast<TypedefDecl>(D)) {
148 if (TD->getUnderlyingType()->isVariablyModifiedType())
149 return std::make_pair((unsigned) diag::note_protected_by_vla_typedef, 0);
152 return std::make_pair(0U, 0U);
155 /// \brief Build scope information for a declaration that is part of a DeclStmt.
156 void JumpScopeChecker::BuildScopeInformation(Decl *D, unsigned &ParentScope) {
157 bool isCPlusPlus = this->S.getLangOptions().CPlusPlus;
159 // If this decl causes a new scope, push and switch to it.
160 std::pair<unsigned,unsigned> Diags
161 = GetDiagForGotoScopeDecl(D, isCPlusPlus);
162 if (Diags.first || Diags.second) {
163 Scopes.push_back(GotoScope(ParentScope, Diags.first, Diags.second,
164 D->getLocation()));
165 ParentScope = Scopes.size()-1;
168 // If the decl has an initializer, walk it with the potentially new
169 // scope we just installed.
170 if (VarDecl *VD = dyn_cast<VarDecl>(D))
171 if (Expr *Init = VD->getInit())
172 BuildScopeInformation(Init, ParentScope);
175 /// BuildScopeInformation - The statements from CI to CE are known to form a
176 /// coherent VLA scope with a specified parent node. Walk through the
177 /// statements, adding any labels or gotos to LabelAndGotoScopes and recursively
178 /// walking the AST as needed.
179 void JumpScopeChecker::BuildScopeInformation(Stmt *S, unsigned ParentScope) {
180 bool SkipFirstSubStmt = false;
182 // If we found a label, remember that it is in ParentScope scope.
183 switch (S->getStmtClass()) {
184 case Stmt::AddrLabelExprClass:
185 IndirectJumpTargets.push_back(cast<AddrLabelExpr>(S)->getLabel());
186 break;
188 case Stmt::IndirectGotoStmtClass:
189 LabelAndGotoScopes[S] = ParentScope;
190 IndirectJumps.push_back(cast<IndirectGotoStmt>(S));
191 break;
193 case Stmt::SwitchStmtClass:
194 // Evaluate the condition variable before entering the scope of the switch
195 // statement.
196 if (VarDecl *Var = cast<SwitchStmt>(S)->getConditionVariable()) {
197 BuildScopeInformation(Var, ParentScope);
198 SkipFirstSubStmt = true;
200 // Fall through
202 case Stmt::GotoStmtClass:
203 // Remember both what scope a goto is in as well as the fact that we have
204 // it. This makes the second scan not have to walk the AST again.
205 LabelAndGotoScopes[S] = ParentScope;
206 Jumps.push_back(S);
207 break;
209 default:
210 break;
213 for (Stmt::child_iterator CI = S->child_begin(), E = S->child_end(); CI != E;
214 ++CI) {
215 if (SkipFirstSubStmt) {
216 SkipFirstSubStmt = false;
217 continue;
220 Stmt *SubStmt = *CI;
221 if (SubStmt == 0) continue;
223 // Cases, labels, and defaults aren't "scope parents". It's also
224 // important to handle these iteratively instead of recursively in
225 // order to avoid blowing out the stack.
226 while (true) {
227 Stmt *Next;
228 if (isa<CaseStmt>(SubStmt))
229 Next = cast<CaseStmt>(SubStmt)->getSubStmt();
230 else if (isa<DefaultStmt>(SubStmt))
231 Next = cast<DefaultStmt>(SubStmt)->getSubStmt();
232 else if (isa<LabelStmt>(SubStmt))
233 Next = cast<LabelStmt>(SubStmt)->getSubStmt();
234 else
235 break;
237 LabelAndGotoScopes[SubStmt] = ParentScope;
238 SubStmt = Next;
241 // If this is a declstmt with a VLA definition, it defines a scope from here
242 // to the end of the containing context.
243 if (DeclStmt *DS = dyn_cast<DeclStmt>(SubStmt)) {
244 // The decl statement creates a scope if any of the decls in it are VLAs
245 // or have the cleanup attribute.
246 for (DeclStmt::decl_iterator I = DS->decl_begin(), E = DS->decl_end();
247 I != E; ++I)
248 BuildScopeInformation(*I, ParentScope);
249 continue;
252 // Disallow jumps into any part of an @try statement by pushing a scope and
253 // walking all sub-stmts in that scope.
254 if (ObjCAtTryStmt *AT = dyn_cast<ObjCAtTryStmt>(SubStmt)) {
255 // Recursively walk the AST for the @try part.
256 Scopes.push_back(GotoScope(ParentScope,
257 diag::note_protected_by_objc_try,
258 diag::note_exits_objc_try,
259 AT->getAtTryLoc()));
260 if (Stmt *TryPart = AT->getTryBody())
261 BuildScopeInformation(TryPart, Scopes.size()-1);
263 // Jump from the catch to the finally or try is not valid.
264 for (unsigned I = 0, N = AT->getNumCatchStmts(); I != N; ++I) {
265 ObjCAtCatchStmt *AC = AT->getCatchStmt(I);
266 Scopes.push_back(GotoScope(ParentScope,
267 diag::note_protected_by_objc_catch,
268 diag::note_exits_objc_catch,
269 AC->getAtCatchLoc()));
270 // @catches are nested and it isn't
271 BuildScopeInformation(AC->getCatchBody(), Scopes.size()-1);
274 // Jump from the finally to the try or catch is not valid.
275 if (ObjCAtFinallyStmt *AF = AT->getFinallyStmt()) {
276 Scopes.push_back(GotoScope(ParentScope,
277 diag::note_protected_by_objc_finally,
278 diag::note_exits_objc_finally,
279 AF->getAtFinallyLoc()));
280 BuildScopeInformation(AF, Scopes.size()-1);
283 continue;
286 // Disallow jumps into the protected statement of an @synchronized, but
287 // allow jumps into the object expression it protects.
288 if (ObjCAtSynchronizedStmt *AS = dyn_cast<ObjCAtSynchronizedStmt>(SubStmt)){
289 // Recursively walk the AST for the @synchronized object expr, it is
290 // evaluated in the normal scope.
291 BuildScopeInformation(AS->getSynchExpr(), ParentScope);
293 // Recursively walk the AST for the @synchronized part, protected by a new
294 // scope.
295 Scopes.push_back(GotoScope(ParentScope,
296 diag::note_protected_by_objc_synchronized,
297 diag::note_exits_objc_synchronized,
298 AS->getAtSynchronizedLoc()));
299 BuildScopeInformation(AS->getSynchBody(), Scopes.size()-1);
300 continue;
303 // Disallow jumps into any part of a C++ try statement. This is pretty
304 // much the same as for Obj-C.
305 if (CXXTryStmt *TS = dyn_cast<CXXTryStmt>(SubStmt)) {
306 Scopes.push_back(GotoScope(ParentScope,
307 diag::note_protected_by_cxx_try,
308 diag::note_exits_cxx_try,
309 TS->getSourceRange().getBegin()));
310 if (Stmt *TryBlock = TS->getTryBlock())
311 BuildScopeInformation(TryBlock, Scopes.size()-1);
313 // Jump from the catch into the try is not allowed either.
314 for (unsigned I = 0, E = TS->getNumHandlers(); I != E; ++I) {
315 CXXCatchStmt *CS = TS->getHandler(I);
316 Scopes.push_back(GotoScope(ParentScope,
317 diag::note_protected_by_cxx_catch,
318 diag::note_exits_cxx_catch,
319 CS->getSourceRange().getBegin()));
320 BuildScopeInformation(CS->getHandlerBlock(), Scopes.size()-1);
323 continue;
326 // Recursively walk the AST.
327 BuildScopeInformation(SubStmt, ParentScope);
331 /// VerifyJumps - Verify each element of the Jumps array to see if they are
332 /// valid, emitting diagnostics if not.
333 void JumpScopeChecker::VerifyJumps() {
334 while (!Jumps.empty()) {
335 Stmt *Jump = Jumps.pop_back_val();
337 // With a goto,
338 if (GotoStmt *GS = dyn_cast<GotoStmt>(Jump)) {
339 CheckJump(GS, GS->getLabel(), GS->getGotoLoc(),
340 diag::err_goto_into_protected_scope);
341 continue;
344 SwitchStmt *SS = cast<SwitchStmt>(Jump);
345 for (SwitchCase *SC = SS->getSwitchCaseList(); SC;
346 SC = SC->getNextSwitchCase()) {
347 assert(LabelAndGotoScopes.count(SC) && "Case not visited?");
348 CheckJump(SS, SC, SC->getLocStart(),
349 diag::err_switch_into_protected_scope);
354 /// VerifyIndirectJumps - Verify whether any possible indirect jump
355 /// might cross a protection boundary. Unlike direct jumps, indirect
356 /// jumps count cleanups as protection boundaries: since there's no
357 /// way to know where the jump is going, we can't implicitly run the
358 /// right cleanups the way we can with direct jumps.
360 /// Thus, an indirect jump is "trivial" if it bypasses no
361 /// initializations and no teardowns. More formally, an indirect jump
362 /// from A to B is trivial if the path out from A to DCA(A,B) is
363 /// trivial and the path in from DCA(A,B) to B is trivial, where
364 /// DCA(A,B) is the deepest common ancestor of A and B.
365 /// Jump-triviality is transitive but asymmetric.
367 /// A path in is trivial if none of the entered scopes have an InDiag.
368 /// A path out is trivial is none of the exited scopes have an OutDiag.
370 /// Under these definitions, this function checks that the indirect
371 /// jump between A and B is trivial for every indirect goto statement A
372 /// and every label B whose address was taken in the function.
373 void JumpScopeChecker::VerifyIndirectJumps() {
374 if (IndirectJumps.empty()) return;
376 // If there aren't any address-of-label expressions in this function,
377 // complain about the first indirect goto.
378 if (IndirectJumpTargets.empty()) {
379 S.Diag(IndirectJumps[0]->getGotoLoc(),
380 diag::err_indirect_goto_without_addrlabel);
381 return;
384 // Collect a single representative of every scope containing an
385 // indirect goto. For most code bases, this substantially cuts
386 // down on the number of jump sites we'll have to consider later.
387 typedef std::pair<unsigned, IndirectGotoStmt*> JumpScope;
388 llvm::SmallVector<JumpScope, 32> JumpScopes;
390 llvm::DenseMap<unsigned, IndirectGotoStmt*> JumpScopesMap;
391 for (llvm::SmallVectorImpl<IndirectGotoStmt*>::iterator
392 I = IndirectJumps.begin(), E = IndirectJumps.end(); I != E; ++I) {
393 IndirectGotoStmt *IG = *I;
394 assert(LabelAndGotoScopes.count(IG) &&
395 "indirect jump didn't get added to scopes?");
396 unsigned IGScope = LabelAndGotoScopes[IG];
397 IndirectGotoStmt *&Entry = JumpScopesMap[IGScope];
398 if (!Entry) Entry = IG;
400 JumpScopes.reserve(JumpScopesMap.size());
401 for (llvm::DenseMap<unsigned, IndirectGotoStmt*>::iterator
402 I = JumpScopesMap.begin(), E = JumpScopesMap.end(); I != E; ++I)
403 JumpScopes.push_back(*I);
406 // Collect a single representative of every scope containing a
407 // label whose address was taken somewhere in the function.
408 // For most code bases, there will be only one such scope.
409 llvm::DenseMap<unsigned, LabelStmt*> TargetScopes;
410 for (llvm::SmallVectorImpl<LabelStmt*>::iterator
411 I = IndirectJumpTargets.begin(), E = IndirectJumpTargets.end();
412 I != E; ++I) {
413 LabelStmt *TheLabel = *I;
414 assert(LabelAndGotoScopes.count(TheLabel) &&
415 "Referenced label didn't get added to scopes?");
416 unsigned LabelScope = LabelAndGotoScopes[TheLabel];
417 LabelStmt *&Target = TargetScopes[LabelScope];
418 if (!Target) Target = TheLabel;
421 // For each target scope, make sure it's trivially reachable from
422 // every scope containing a jump site.
424 // A path between scopes always consists of exitting zero or more
425 // scopes, then entering zero or more scopes. We build a set of
426 // of scopes S from which the target scope can be trivially
427 // entered, then verify that every jump scope can be trivially
428 // exitted to reach a scope in S.
429 llvm::BitVector Reachable(Scopes.size(), false);
430 for (llvm::DenseMap<unsigned,LabelStmt*>::iterator
431 TI = TargetScopes.begin(), TE = TargetScopes.end(); TI != TE; ++TI) {
432 unsigned TargetScope = TI->first;
433 LabelStmt *TargetLabel = TI->second;
435 Reachable.reset();
437 // Mark all the enclosing scopes from which you can safely jump
438 // into the target scope. 'Min' will end up being the index of
439 // the shallowest such scope.
440 unsigned Min = TargetScope;
441 while (true) {
442 Reachable.set(Min);
444 // Don't go beyond the outermost scope.
445 if (Min == 0) break;
447 // Stop if we can't trivially enter the current scope.
448 if (Scopes[Min].InDiag) break;
450 Min = Scopes[Min].ParentScope;
453 // Walk through all the jump sites, checking that they can trivially
454 // reach this label scope.
455 for (llvm::SmallVectorImpl<JumpScope>::iterator
456 I = JumpScopes.begin(), E = JumpScopes.end(); I != E; ++I) {
457 unsigned Scope = I->first;
459 // Walk out the "scope chain" for this scope, looking for a scope
460 // we've marked reachable. For well-formed code this amortizes
461 // to O(JumpScopes.size() / Scopes.size()): we only iterate
462 // when we see something unmarked, and in well-formed code we
463 // mark everything we iterate past.
464 bool IsReachable = false;
465 while (true) {
466 if (Reachable.test(Scope)) {
467 // If we find something reachable, mark all the scopes we just
468 // walked through as reachable.
469 for (unsigned S = I->first; S != Scope; S = Scopes[S].ParentScope)
470 Reachable.set(S);
471 IsReachable = true;
472 break;
475 // Don't walk out if we've reached the top-level scope or we've
476 // gotten shallower than the shallowest reachable scope.
477 if (Scope == 0 || Scope < Min) break;
479 // Don't walk out through an out-diagnostic.
480 if (Scopes[Scope].OutDiag) break;
482 Scope = Scopes[Scope].ParentScope;
485 // Only diagnose if we didn't find something.
486 if (IsReachable) continue;
488 DiagnoseIndirectJump(I->second, I->first, TargetLabel, TargetScope);
493 /// Diagnose an indirect jump which is known to cross scopes.
494 void JumpScopeChecker::DiagnoseIndirectJump(IndirectGotoStmt *Jump,
495 unsigned JumpScope,
496 LabelStmt *Target,
497 unsigned TargetScope) {
498 assert(JumpScope != TargetScope);
500 S.Diag(Jump->getGotoLoc(), diag::warn_indirect_goto_in_protected_scope);
501 S.Diag(Target->getIdentLoc(), diag::note_indirect_goto_target);
503 unsigned Common = GetDeepestCommonScope(JumpScope, TargetScope);
505 // Walk out the scope chain until we reach the common ancestor.
506 for (unsigned I = JumpScope; I != Common; I = Scopes[I].ParentScope)
507 if (Scopes[I].OutDiag)
508 S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
510 // Now walk into the scopes containing the label whose address was taken.
511 for (unsigned I = TargetScope; I != Common; I = Scopes[I].ParentScope)
512 if (Scopes[I].InDiag)
513 S.Diag(Scopes[I].Loc, Scopes[I].InDiag);
516 /// CheckJump - Validate that the specified jump statement is valid: that it is
517 /// jumping within or out of its current scope, not into a deeper one.
518 void JumpScopeChecker::CheckJump(Stmt *From, Stmt *To,
519 SourceLocation DiagLoc, unsigned JumpDiag) {
520 assert(LabelAndGotoScopes.count(From) && "Jump didn't get added to scopes?");
521 unsigned FromScope = LabelAndGotoScopes[From];
523 assert(LabelAndGotoScopes.count(To) && "Jump didn't get added to scopes?");
524 unsigned ToScope = LabelAndGotoScopes[To];
526 // Common case: exactly the same scope, which is fine.
527 if (FromScope == ToScope) return;
529 unsigned CommonScope = GetDeepestCommonScope(FromScope, ToScope);
531 // It's okay to jump out from a nested scope.
532 if (CommonScope == ToScope) return;
534 // Pull out (and reverse) any scopes we might need to diagnose skipping.
535 llvm::SmallVector<unsigned, 10> ToScopes;
536 for (unsigned I = ToScope; I != CommonScope; I = Scopes[I].ParentScope)
537 if (Scopes[I].InDiag)
538 ToScopes.push_back(I);
540 // If the only scopes present are cleanup scopes, we're okay.
541 if (ToScopes.empty()) return;
543 S.Diag(DiagLoc, JumpDiag);
545 // Emit diagnostics for whatever is left in ToScopes.
546 for (unsigned i = 0, e = ToScopes.size(); i != e; ++i)
547 S.Diag(Scopes[ToScopes[i]].Loc, Scopes[ToScopes[i]].InDiag);
550 void Sema::DiagnoseInvalidJumps(Stmt *Body) {
551 (void)JumpScopeChecker(Body, *this);