Complain on missing property getter method only
lib/GR/ExprEngine.cpp (clang.git)
1 //=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file defines a meta-engine for path-sensitive dataflow analysis that
11 // is built on GREngine, but provides the boilerplate to execute transfer
12 // functions and build the ExplodedGraph at the expression level.
14 //===----------------------------------------------------------------------===//
16 // FIXME: Restructure checker registration.
17 #include "Checkers/ExprEngineInternalChecks.h"
19 #include "clang/GR/BugReporter/BugType.h"
20 #include "clang/GR/PathSensitive/AnalysisManager.h"
21 #include "clang/GR/PathSensitive/ExprEngine.h"
22 #include "clang/GR/PathSensitive/ExprEngineBuilders.h"
23 #include "clang/GR/PathSensitive/Checker.h"
24 #include "clang/AST/CharUnits.h"
25 #include "clang/AST/ParentMap.h"
26 #include "clang/AST/StmtObjC.h"
27 #include "clang/AST/DeclCXX.h"
28 #include "clang/Basic/Builtins.h"
29 #include "clang/Basic/SourceManager.h"
31 #include "clang/Basic/PrettyStackTrace.h"
32 #include "llvm/Support/raw_ostream.h"
33 #include "llvm/ADT/ImmutableList.h"
35 #ifndef NDEBUG
36 #include "llvm/Support/GraphWriter.h"
37 #endif
39 using namespace clang;
40 using namespace GR;
41 using llvm::dyn_cast;
42 using llvm::dyn_cast_or_null;
43 using llvm::cast;
44 using llvm::APSInt;
46 namespace {
47 // Trait class for recording the returned expression in the state.
48 struct ReturnExpr {
49 static int TagInt;
50 typedef const Stmt *data_type;
51 };
52 int ReturnExpr::TagInt;
53 }
55 //===----------------------------------------------------------------------===//
56 // Utility functions.
57 //===----------------------------------------------------------------------===//
59 static inline Selector GetNullarySelector(const char* name, ASTContext& Ctx) {
60 IdentifierInfo* II = &Ctx.Idents.get(name);
61 return Ctx.Selectors.getSelector(0, &II);
64 //===----------------------------------------------------------------------===//
65 // Checker worklist routines.
66 //===----------------------------------------------------------------------===//
68 void ExprEngine::CheckerVisit(const Stmt *S, ExplodedNodeSet &Dst,
69 ExplodedNodeSet &Src, CallbackKind Kind) {
71 // Determine if we already have a cached 'CheckersOrdered' vector
72 // specifically tailored for the provided <CallbackKind, Stmt kind>. This
73 // can reduce the number of checkers actually called.
74 CheckersOrdered *CO = &Checkers;
75 llvm::OwningPtr<CheckersOrdered> NewCO;
77 // The cache key is made up of the callback kind (pre- or post-visit)
78 // and the statement kind.
79 CallbackTag K = GetCallbackTag(Kind, S->getStmtClass());
81 CheckersOrdered *& CO_Ref = COCache[K];
83 if (!CO_Ref) {
84 // If we have no previously cached CheckersOrdered vector for this
85 // statement kind, then create one.
86 NewCO.reset(new CheckersOrdered);
88 else {
89 // Use the already cached set.
90 CO = CO_Ref;
93 if (CO->empty()) {
94 // If there are no checkers, return early without doing any
95 // more work.
96 Dst.insert(Src);
97 return;
100 ExplodedNodeSet Tmp;
101 ExplodedNodeSet *PrevSet = &Src;
102 unsigned checkersEvaluated = 0;
104 for (CheckersOrdered::iterator I=CO->begin(), E=CO->end(); I!=E; ++I) {
105 // If all nodes are sunk, bail out early.
106 if (PrevSet->empty())
107 break;
108 ExplodedNodeSet *CurrSet = 0;
109 if (I+1 == E)
110 CurrSet = &Dst;
111 else {
112 CurrSet = (PrevSet == &Tmp) ? &Src : &Tmp;
113 CurrSet->clear();
115 void *tag = I->first;
116 Checker *checker = I->second;
117 bool respondsToCallback = true;
119 for (ExplodedNodeSet::iterator NI = PrevSet->begin(), NE = PrevSet->end();
120 NI != NE; ++NI) {
122 checker->GR_Visit(*CurrSet, *Builder, *this, S, *NI, tag,
123 Kind == PreVisitStmtCallback, respondsToCallback);
127 PrevSet = CurrSet;
129 if (NewCO.get()) {
130 ++checkersEvaluated;
131 if (respondsToCallback)
132 NewCO->push_back(*I);
136 // If we built NewCO, check if we called all the checkers. This is important
137 // so that we know that we accurately determined the entire set of checkers
138 // that respond to this callback. Note that 'checkersEvaluated' might
139 // not be the same as Checkers.size() if one of the Checkers generates
140 // a sink node.
141 if (NewCO.get() && checkersEvaluated == Checkers.size())
142 CO_Ref = NewCO.take();
144 // Don't autotransition. The CheckerContext objects should do this
145 // automatically.
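// CheckerEvalNilReceiver - Give the registered checkers a chance to model a
// message sent to a nil receiver. The first checker that handles it wins; if
// none does, the predecessor node is propagated unchanged.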
148 void ExprEngine::CheckerEvalNilReceiver(const ObjCMessageExpr *ME,
149 ExplodedNodeSet &Dst,
150 const GRState *state,
151 ExplodedNode *Pred) {
152 bool evaluated = false;
153 ExplodedNodeSet DstTmp;
155 for (CheckersOrdered::iterator I=Checkers.begin(),E=Checkers.end();I!=E;++I) {
156 void *tag = I->first;
157 Checker *checker = I->second;
159 if (checker->GR_evalNilReceiver(DstTmp, *Builder, *this, ME, Pred, state,
160 tag)) {
161 evaluated = true;
162 break;
163 } else
164 // The checker didn't evaluate the expr. Clear DstTmp for the next checker.
165 DstTmp.clear();
168 if (evaluated)
169 Dst.insert(DstTmp);
170 else
171 Dst.insert(Pred);
174 // CheckerEvalCall returns true if one of the checkers processed the node.
175 // Its return type may become void once all call-evaluation logic has been
176 // moved into checkers.
177 bool ExprEngine::CheckerEvalCall(const CallExpr *CE,
178 ExplodedNodeSet &Dst,
179 ExplodedNode *Pred) {
180 bool evaluated = false;
181 ExplodedNodeSet DstTmp;
183 for (CheckersOrdered::iterator I=Checkers.begin(),E=Checkers.end();I!=E;++I) {
184 void *tag = I->first;
185 Checker *checker = I->second;
187 if (checker->GR_evalCallExpr(DstTmp, *Builder, *this, CE, Pred, tag)) {
188 evaluated = true;
189 break;
190 } else
191 // The checker didn't evaluate the expr. Restore the DstTmp set.
192 DstTmp.clear();
195 if (evaluated)
196 Dst.insert(DstTmp);
197 else
198 Dst.insert(Pred);
200 return evaluated;
203 // FIXME: This is largely copy-paste from CheckerVisit(). Need to
204 // unify.
205 void ExprEngine::CheckerVisitBind(const Stmt *StoreE, ExplodedNodeSet &Dst,
206 ExplodedNodeSet &Src, SVal location,
207 SVal val, bool isPrevisit) {
209 if (Checkers.empty()) {
210 Dst.insert(Src);
211 return;
214 ExplodedNodeSet Tmp;
215 ExplodedNodeSet *PrevSet = &Src;
217 for (CheckersOrdered::iterator I=Checkers.begin(),E=Checkers.end(); I!=E; ++I)
219 ExplodedNodeSet *CurrSet = 0;
220 if (I+1 == E)
221 CurrSet = &Dst;
222 else {
223 CurrSet = (PrevSet == &Tmp) ? &Src : &Tmp;
224 CurrSet->clear();
227 void *tag = I->first;
228 Checker *checker = I->second;
230 for (ExplodedNodeSet::iterator NI = PrevSet->begin(), NE = PrevSet->end();
231 NI != NE; ++NI)
232 checker->GR_VisitBind(*CurrSet, *Builder, *this, StoreE,
233 *NI, tag, location, val, isPrevisit);
235 // Update which NodeSet is the current one.
236 PrevSet = CurrSet;
239 // Don't autotransition. The CheckerContext objects should do this
240 // automatically.
242 //===----------------------------------------------------------------------===//
243 // Engine construction and deletion.
244 //===----------------------------------------------------------------------===//
246 static void RegisterInternalChecks(ExprEngine &Eng) {
247 // Register internal "built-in" BugTypes with the BugReporter. These BugTypes
248 // differ from those of most checks in that they don't
249 // create BugReports on-the-fly but instead wait until ExprEngine finishes
250 // analyzing a function. Generation of BugReport objects is done via a call
251 // to 'FlushReports' from BugReporter.
252 // The following checks do not need to have their associated BugTypes
253 // explicitly registered with the BugReporter. If they issue any BugReports,
254 // their associated BugType will get registered with the BugReporter
255 // automatically. Note that the check itself is owned by the ExprEngine
256 // object.
257 RegisterAdjustedReturnValueChecker(Eng);
258 // CallAndMessageChecker should be registered before AttrNonNullChecker,
259 // where we assume arguments are not undefined.
260 RegisterCallAndMessageChecker(Eng);
261 RegisterAttrNonNullChecker(Eng);
262 RegisterDereferenceChecker(Eng);
263 RegisterVLASizeChecker(Eng);
264 RegisterDivZeroChecker(Eng);
265 RegisterReturnUndefChecker(Eng);
266 RegisterUndefinedArraySubscriptChecker(Eng);
267 RegisterUndefinedAssignmentChecker(Eng);
268 RegisterUndefBranchChecker(Eng);
269 RegisterUndefCapturedBlockVarChecker(Eng);
270 RegisterUndefResultChecker(Eng);
271 RegisterStackAddrLeakChecker(Eng);
272 RegisterObjCAtSyncChecker(Eng);
274 // This is not a checker yet.
275 RegisterNoReturnFunctionChecker(Eng);
276 RegisterBuiltinFunctionChecker(Eng);
277 RegisterOSAtomicChecker(Eng);
278 RegisterUnixAPIChecker(Eng);
279 RegisterMacOSXAPIChecker(Eng);
282 ExprEngine::ExprEngine(AnalysisManager &mgr, TransferFuncs *tf)
283 : AMgr(mgr),
284 Engine(*this),
285 G(Engine.getGraph()),
286 Builder(NULL),
287 StateMgr(getContext(), mgr.getStoreManagerCreator(),
288 mgr.getConstraintManagerCreator(), G.getAllocator(),
289 *this),
290 SymMgr(StateMgr.getSymbolManager()),
291 svalBuilder(StateMgr.getSValBuilder()),
292 EntryNode(NULL), currentStmt(NULL),
293 NSExceptionII(NULL), NSExceptionInstanceRaiseSelectors(NULL),
294 RaiseSel(GetNullarySelector("raise", getContext())),
295 BR(mgr, *this), TF(tf) {
296 // Register internal checks.
297 RegisterInternalChecks(*this);
299 // FIXME: Eventually remove the TF object entirely.
300 TF->RegisterChecks(*this);
301 TF->RegisterPrinters(getStateManager().Printers);
304 ExprEngine::~ExprEngine() {
305 BR.FlushReports();
306 delete [] NSExceptionInstanceRaiseSelectors;
308 // Delete the set of checkers.
309 for (CheckersOrdered::iterator I=Checkers.begin(), E=Checkers.end(); I!=E;++I)
310 delete I->second;
312 for (CheckersOrderedCache::iterator I=COCache.begin(), E=COCache.end();
313 I!=E;++I)
314 delete I->second;
317 //===----------------------------------------------------------------------===//
318 // Utility methods.
319 //===----------------------------------------------------------------------===//
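// getInitialState - Build the symbolic state at function entry, seeding it
// with preconditions such as main()'s first argument being > 0 and 'self'
// being non-null in Objective-C methods.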
321 const GRState* ExprEngine::getInitialState(const LocationContext *InitLoc) {
322 const GRState *state = StateMgr.getInitialState(InitLoc);
324 // Preconditions.
326 // FIXME: It would be nice if we had a more general mechanism to add
327 // such preconditions. Some day.
328 do {
329 const Decl *D = InitLoc->getDecl();
330 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
331 // Precondition: the first argument of 'main' is an integer guaranteed
332 // to be > 0.
333 const IdentifierInfo *II = FD->getIdentifier();
334 if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
335 break;
337 const ParmVarDecl *PD = FD->getParamDecl(0);
338 QualType T = PD->getType();
339 if (!T->isIntegerType())
340 break;
342 const MemRegion *R = state->getRegion(PD, InitLoc);
343 if (!R)
344 break;
346 SVal V = state->getSVal(loc::MemRegionVal(R));
347 SVal Constraint_untested = evalBinOp(state, BO_GT, V,
348 svalBuilder.makeZeroVal(T),
349 getContext().IntTy);
351 DefinedOrUnknownSVal *Constraint =
352 dyn_cast<DefinedOrUnknownSVal>(&Constraint_untested);
354 if (!Constraint)
355 break;
357 if (const GRState *newState = state->assume(*Constraint, true))
358 state = newState;
360 break;
363 if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
364 // Precondition: 'self' is always non-null upon entry to an Objective-C
365 // method.
366 const ImplicitParamDecl *SelfD = MD->getSelfDecl();
367 const MemRegion *R = state->getRegion(SelfD, InitLoc);
368 SVal V = state->getSVal(loc::MemRegionVal(R));
370 if (const Loc *LV = dyn_cast<Loc>(&V)) {
371 // Assume that the pointer value in 'self' is non-null.
372 state = state->assume(*LV, true);
373 assert(state && "'self' cannot be null");
376 } while (0);
378 return state;
381 //===----------------------------------------------------------------------===//
382 // Top-level transfer function logic (Dispatcher).
383 //===----------------------------------------------------------------------===//
385 /// evalAssume - Called by ConstraintManager. Used to call checker-specific
386 /// logic for handling assumptions on symbolic values.
387 const GRState *ExprEngine::ProcessAssume(const GRState *state, SVal cond,
388 bool assumption) {
389 // Determine if we already have a cached 'CheckersOrdered' vector
390 // specifically tailored for processing assumptions. This
391 // can reduce the number of checkers actually called.
392 CheckersOrdered *CO = &Checkers;
393 llvm::OwningPtr<CheckersOrdered> NewCO;
395 CallbackTag K = GetCallbackTag(ProcessAssumeCallback);
396 CheckersOrdered *& CO_Ref = COCache[K];
398 if (!CO_Ref) {
399 // If we have no previously cached CheckersOrdered vector for this
400 // callback, then create one.
401 NewCO.reset(new CheckersOrdered);
403 else {
404 // Use the already cached set.
405 CO = CO_Ref;
408 if (!CO->empty()) {
409 // Let the checkers have a crack at the assume before the transfer functions
410 // get their turn.
411 for (CheckersOrdered::iterator I = CO->begin(), E = CO->end(); I!=E; ++I) {
413 // If any checker declares the state infeasible (or if it starts that
414 // way), bail out.
415 if (!state)
416 return NULL;
418 Checker *C = I->second;
419 bool respondsToCallback = true;
421 state = C->evalAssume(state, cond, assumption, &respondsToCallback);
423 // Check if we're building the cache of checkers that care about
424 // assumptions.
425 if (NewCO.get() && respondsToCallback)
426 NewCO->push_back(*I);
429 // If we got through all the checkers, and we built a list of those that
430 // care about assumptions, save it.
431 if (NewCO.get())
432 CO_Ref = NewCO.take();
435 // If the state is infeasible at this point, bail out.
436 if (!state)
437 return NULL;
439 return TF->evalAssume(state, cond, assumption);
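// WantsRegionChangeUpdate - Return true if at least one registered checker
// wants to be notified when memory regions change for this state.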
442 bool ExprEngine::WantsRegionChangeUpdate(const GRState* state) {
443 CallbackTag K = GetCallbackTag(EvalRegionChangesCallback);
444 CheckersOrdered *CO = COCache[K];
446 if (!CO)
447 CO = &Checkers;
449 for (CheckersOrdered::iterator I = CO->begin(), E = CO->end(); I != E; ++I) {
450 Checker *C = I->second;
451 if (C->WantsRegionChangeUpdate(state))
452 return true;
455 return false;
458 const GRState *
459 ExprEngine::ProcessRegionChanges(const GRState *state,
460 const MemRegion * const *Begin,
461 const MemRegion * const *End) {
462 // FIXME: Most of this method is copy-pasted from ProcessAssume.
464 // Determine if we already have a cached 'CheckersOrdered' vector
465 // specifically tailored for processing region changes. This
466 // can reduce the number of checkers actually called.
467 CheckersOrdered *CO = &Checkers;
468 llvm::OwningPtr<CheckersOrdered> NewCO;
470 CallbackTag K = GetCallbackTag(EvalRegionChangesCallback);
471 CheckersOrdered *& CO_Ref = COCache[K];
473 if (!CO_Ref) {
474 // If we have no previously cached CheckersOrdered vector for this
475 // callback, then create one.
476 NewCO.reset(new CheckersOrdered);
478 else {
479 // Use the already cached set.
480 CO = CO_Ref;
483 // If there are no checkers, just return the state as is.
484 if (CO->empty())
485 return state;
487 for (CheckersOrdered::iterator I = CO->begin(), E = CO->end(); I != E; ++I) {
488 // If any checker declares the state infeasible (or if it starts that way),
489 // bail out.
490 if (!state)
491 return NULL;
493 Checker *C = I->second;
494 bool respondsToCallback = true;
496 state = C->EvalRegionChanges(state, Begin, End, &respondsToCallback);
498 // See if we're building a cache of checkers that care about region changes.
499 if (NewCO.get() && respondsToCallback)
500 NewCO->push_back(*I);
503 // If we got through all the checkers, and we built a list of those that
504 // care about region changes, save it.
505 if (NewCO.get())
506 CO_Ref = NewCO.take();
508 return state;
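// ProcessEndWorklist - Called once the worklist is exhausted; lets each
// checker emit its end-of-analysis diagnostics.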
511 void ExprEngine::ProcessEndWorklist(bool hasWorkRemaining) {
512 for (CheckersOrdered::iterator I = Checkers.begin(), E = Checkers.end();
513 I != E; ++I) {
514 I->second->VisitEndAnalysis(G, BR, *this);
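// ProcessElement - Dispatch a single CFG element to the statement,
// initializer, or implicit-destructor handler.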
518 void ExprEngine::ProcessElement(const CFGElement E,
519 StmtNodeBuilder& builder) {
520 switch (E.getKind()) {
521 case CFGElement::Statement:
522 ProcessStmt(E.getAs<CFGStmt>(), builder);
523 break;
524 case CFGElement::Initializer:
525 ProcessInitializer(E.getAs<CFGInitializer>(), builder);
526 break;
527 case CFGElement::ImplicitDtor:
528 ProcessImplicitDtor(E.getAs<CFGImplicitDtor>(), builder);
529 break;
530 default:
531 // Suppress compiler warning.
532 llvm_unreachable("Unexpected CFGElement kind.");
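// ProcessStmt - Evaluate one statement: purge dead symbols and bindings,
// run the dead-symbol checker callbacks, then visit the statement itself.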
536 void ExprEngine::ProcessStmt(const CFGStmt S, StmtNodeBuilder& builder) {
537 currentStmt = S.getStmt();
538 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
539 currentStmt->getLocStart(),
540 "Error evaluating statement");
542 Builder = &builder;
543 EntryNode = builder.getBasePredecessor();
545 // Create the cleaned state.
546 const LocationContext *LC = EntryNode->getLocationContext();
547 SymbolReaper SymReaper(LC, currentStmt, SymMgr);
549 if (AMgr.shouldPurgeDead()) {
550 const GRState *St = EntryNode->getState();
552 for (CheckersOrdered::iterator I = Checkers.begin(), E = Checkers.end();
553 I != E; ++I) {
554 Checker *checker = I->second;
555 checker->MarkLiveSymbols(St, SymReaper);
558 const StackFrameContext *SFC = LC->getCurrentStackFrame();
559 CleanedState = StateMgr.RemoveDeadBindings(St, SFC, SymReaper);
560 } else {
561 CleanedState = EntryNode->getState();
564 // Process any special transfer function for dead symbols.
565 ExplodedNodeSet Tmp;
567 if (!SymReaper.hasDeadSymbols())
568 Tmp.Add(EntryNode);
569 else {
570 SaveAndRestore<bool> OldSink(Builder->BuildSinks);
571 SaveOr OldHasGen(Builder->HasGeneratedNode);
573 SaveAndRestore<bool> OldPurgeDeadSymbols(Builder->PurgingDeadSymbols);
574 Builder->PurgingDeadSymbols = true;
576 // FIXME: This should soon be removed.
577 ExplodedNodeSet Tmp2;
578 getTF().evalDeadSymbols(Tmp2, *this, *Builder, EntryNode,
579 CleanedState, SymReaper);
581 if (Checkers.empty())
582 Tmp.insert(Tmp2);
583 else {
584 ExplodedNodeSet Tmp3;
585 ExplodedNodeSet *SrcSet = &Tmp2;
586 for (CheckersOrdered::iterator I = Checkers.begin(), E = Checkers.end();
587 I != E; ++I) {
588 ExplodedNodeSet *DstSet = 0;
589 if (I+1 == E)
590 DstSet = &Tmp;
591 else {
592 DstSet = (SrcSet == &Tmp2) ? &Tmp3 : &Tmp2;
593 DstSet->clear();
596 void *tag = I->first;
597 Checker *checker = I->second;
598 for (ExplodedNodeSet::iterator NI = SrcSet->begin(), NE = SrcSet->end();
599 NI != NE; ++NI)
600 checker->GR_evalDeadSymbols(*DstSet, *Builder, *this, currentStmt,
601 *NI, SymReaper, tag);
602 SrcSet = DstSet;
606 if (!Builder->BuildSinks && !Builder->HasGeneratedNode)
607 Tmp.Add(EntryNode);
610 bool HasAutoGenerated = false;
612 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
613 ExplodedNodeSet Dst;
615 // Set the cleaned state.
616 Builder->SetCleanedState(*I == EntryNode ? CleanedState : GetState(*I));
618 // Visit the statement.
619 Visit(currentStmt, *I, Dst);
621 // Do we need to auto-generate a node? We only need to do this to generate
622 // a node with a "cleaned" state; CoreEngine will actually handle
623 // auto-transitions for other cases.
624 if (Dst.size() == 1 && *Dst.begin() == EntryNode
625 && !Builder->HasGeneratedNode && !HasAutoGenerated) {
626 HasAutoGenerated = true;
627 builder.generateNode(currentStmt, GetState(EntryNode), *I);
631 // NULL out these variables to clean up.
632 CleanedState = NULL;
633 EntryNode = NULL;
635 currentStmt = 0;
637 Builder = NULL;
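// ProcessInitializer - Model a C++ member initializer by evaluating the
// initializer expression and binding its value to the corresponding field of
// the object pointed to by 'this'.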
640 void ExprEngine::ProcessInitializer(const CFGInitializer Init,
641 StmtNodeBuilder &builder) {
642 // We don't set EntryNode and currentStmt here, and we don't clean up state.
643 const CXXBaseOrMemberInitializer *BMI = Init.getInitializer();
645 ExplodedNode *Pred = builder.getBasePredecessor();
646 const LocationContext *LC = Pred->getLocationContext();
648 if (BMI->isAnyMemberInitializer()) {
649 ExplodedNodeSet Dst;
651 // Evaluate the initializer.
652 Visit(BMI->getInit(), Pred, Dst);
654 for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end(); I != E; ++I){
655 ExplodedNode *Pred = *I;
656 const GRState *state = Pred->getState();
658 const FieldDecl *FD = BMI->getAnyMember();
659 const RecordDecl *RD = FD->getParent();
660 const CXXThisRegion *ThisR = getCXXThisRegion(cast<CXXRecordDecl>(RD),
661 cast<StackFrameContext>(LC));
663 SVal ThisV = state->getSVal(ThisR);
664 SVal FieldLoc = state->getLValue(FD, ThisV);
665 SVal InitVal = state->getSVal(BMI->getInit());
666 state = state->bindLoc(FieldLoc, InitVal);
668 // Use a custom node building process.
669 PostInitializer PP(BMI, LC);
670 // The builder automatically adds the generated node to the deferred set,
671 // which is processed in the builder's dtor.
672 builder.generateNode(PP, state, Pred);
677 void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
678 StmtNodeBuilder &builder) {
679 Builder = &builder;
681 switch (D.getDtorKind()) {
682 case CFGElement::AutomaticObjectDtor:
683 ProcessAutomaticObjDtor(cast<CFGAutomaticObjDtor>(D), builder);
684 break;
685 case CFGElement::BaseDtor:
686 ProcessBaseDtor(cast<CFGBaseDtor>(D), builder);
687 break;
688 case CFGElement::MemberDtor:
689 ProcessMemberDtor(cast<CFGMemberDtor>(D), builder);
690 break;
691 case CFGElement::TemporaryDtor:
692 ProcessTemporaryDtor(cast<CFGTemporaryDtor>(D), builder);
693 break;
694 default:
695 llvm_unreachable("Unexpected dtor kind.");
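// ProcessAutomaticObjDtor - Model the implicit destructor call for a local
// (automatic) object going out of scope.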
699 void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor dtor,
700 StmtNodeBuilder &builder) {
701 ExplodedNode *pred = builder.getBasePredecessor();
702 const GRState *state = pred->getState();
703 const VarDecl *varDecl = dtor.getVarDecl();
705 QualType varType = varDecl->getType();
707 if (const ReferenceType *refType = varType->getAs<ReferenceType>())
708 varType = refType->getPointeeType();
710 const CXXRecordDecl *recordDecl = varType->getAsCXXRecordDecl();
711 assert(recordDecl && "could not get the CXXRecordDecl");
712 const CXXDestructorDecl *dtorDecl = recordDecl->getDestructor();
714 Loc dest = state->getLValue(varDecl, pred->getLocationContext());
716 ExplodedNodeSet dstSet;
717 VisitCXXDestructor(dtorDecl, cast<loc::MemRegionVal>(dest).getRegion(),
718 dtor.getTriggerStmt(), pred, dstSet);
721 void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
722 StmtNodeBuilder &builder) {
725 void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
726 StmtNodeBuilder &builder) {
729 void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
730 StmtNodeBuilder &builder) {
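// Visit - Top-level statement dispatch: route 'S' to the transfer function
// for its statement class, or simply propagate the predecessor for statements
// that need no evaluation.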
733 void ExprEngine::Visit(const Stmt* S, ExplodedNode* Pred,
734 ExplodedNodeSet& Dst) {
735 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
736 S->getLocStart(),
737 "Error evaluating statement");
739 // Expressions to ignore.
740 if (const Expr *Ex = dyn_cast<Expr>(S))
741 S = Ex->IgnoreParens();
743 // FIXME: add metadata to the CFG so that we can disable
744 // this check when we KNOW that there is no block-level subexpression.
745 // The motivation is that this check requires a hashtable lookup.
747 if (S != currentStmt && Pred->getLocationContext()->getCFG()->isBlkExpr(S)) {
748 Dst.Add(Pred);
749 return;
752 switch (S->getStmtClass()) {
753 // C++ stuff we don't support yet.
754 case Stmt::CXXBindTemporaryExprClass:
755 case Stmt::CXXCatchStmtClass:
756 case Stmt::CXXDefaultArgExprClass:
757 case Stmt::CXXDependentScopeMemberExprClass:
758 case Stmt::ExprWithCleanupsClass:
759 case Stmt::CXXNullPtrLiteralExprClass:
760 case Stmt::CXXPseudoDestructorExprClass:
761 case Stmt::CXXTemporaryObjectExprClass:
762 case Stmt::CXXThrowExprClass:
763 case Stmt::CXXTryStmtClass:
764 case Stmt::CXXTypeidExprClass:
765 case Stmt::CXXUuidofExprClass:
766 case Stmt::CXXUnresolvedConstructExprClass:
767 case Stmt::CXXScalarValueInitExprClass:
768 case Stmt::DependentScopeDeclRefExprClass:
769 case Stmt::UnaryTypeTraitExprClass:
770 case Stmt::BinaryTypeTraitExprClass:
771 case Stmt::UnresolvedLookupExprClass:
772 case Stmt::UnresolvedMemberExprClass:
773 case Stmt::CXXNoexceptExprClass:
775 SaveAndRestore<bool> OldSink(Builder->BuildSinks);
776 Builder->BuildSinks = true;
777 MakeNode(Dst, S, Pred, GetState(Pred));
778 break;
781 case Stmt::ParenExprClass:
782 llvm_unreachable("ParenExprs already handled.");
783 // Cases that should never be evaluated simply because they shouldn't
784 // appear in the CFG.
785 case Stmt::BreakStmtClass:
786 case Stmt::CaseStmtClass:
787 case Stmt::CompoundStmtClass:
788 case Stmt::ContinueStmtClass:
789 case Stmt::DefaultStmtClass:
790 case Stmt::DoStmtClass:
791 case Stmt::GotoStmtClass:
792 case Stmt::IndirectGotoStmtClass:
793 case Stmt::LabelStmtClass:
794 case Stmt::NoStmtClass:
795 case Stmt::NullStmtClass:
796 case Stmt::SwitchCaseClass:
797 case Stmt::OpaqueValueExprClass:
798 llvm_unreachable("Stmt should not be in analyzer evaluation loop");
799 break;
801 case Stmt::GNUNullExprClass: {
802 MakeNode(Dst, S, Pred, GetState(Pred)->BindExpr(S, svalBuilder.makeNull()));
803 break;
806 case Stmt::ObjCAtSynchronizedStmtClass:
807 VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
808 break;
810 // Cases not handled yet, but which we will handle some day.
811 case Stmt::DesignatedInitExprClass:
812 case Stmt::ExtVectorElementExprClass:
813 case Stmt::ImaginaryLiteralClass:
814 case Stmt::ImplicitValueInitExprClass:
815 case Stmt::ObjCAtCatchStmtClass:
816 case Stmt::ObjCAtFinallyStmtClass:
817 case Stmt::ObjCAtTryStmtClass:
818 case Stmt::ObjCEncodeExprClass:
819 case Stmt::ObjCIsaExprClass:
820 case Stmt::ObjCPropertyRefExprClass:
821 case Stmt::ObjCProtocolExprClass:
822 case Stmt::ObjCSelectorExprClass:
823 case Stmt::ObjCStringLiteralClass:
824 case Stmt::ParenListExprClass:
825 case Stmt::PredefinedExprClass:
826 case Stmt::ShuffleVectorExprClass:
827 case Stmt::VAArgExprClass:
828 // Fall through.
830 // Cases we intentionally don't evaluate, since they don't need
831 // to be explicitly evaluated.
832 case Stmt::AddrLabelExprClass:
833 case Stmt::IntegerLiteralClass:
834 case Stmt::CharacterLiteralClass:
835 case Stmt::CXXBoolLiteralExprClass:
836 case Stmt::FloatingLiteralClass:
837 Dst.Add(Pred); // No-op. Simply propagate the current state unchanged.
838 break;
840 case Stmt::ArraySubscriptExprClass:
841 VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
842 break;
844 case Stmt::AsmStmtClass:
845 VisitAsmStmt(cast<AsmStmt>(S), Pred, Dst);
846 break;
848 case Stmt::BlockDeclRefExprClass: {
849 const BlockDeclRefExpr *BE = cast<BlockDeclRefExpr>(S);
850 VisitCommonDeclRefExpr(BE, BE->getDecl(), Pred, Dst);
851 break;
854 case Stmt::BlockExprClass:
855 VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
856 break;
858 case Stmt::BinaryOperatorClass: {
859 const BinaryOperator* B = cast<BinaryOperator>(S);
860 if (B->isLogicalOp()) {
861 VisitLogicalExpr(B, Pred, Dst);
862 break;
864 else if (B->getOpcode() == BO_Comma) {
865 const GRState* state = GetState(Pred);
866 MakeNode(Dst, B, Pred, state->BindExpr(B, state->getSVal(B->getRHS())));
867 break;
870 if (AMgr.shouldEagerlyAssume() &&
871 (B->isRelationalOp() || B->isEqualityOp())) {
872 ExplodedNodeSet Tmp;
873 VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
874 evalEagerlyAssume(Dst, Tmp, cast<Expr>(S));
876 else
877 VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
879 break;
882 case Stmt::CallExprClass: {
883 const CallExpr* C = cast<CallExpr>(S);
884 VisitCall(C, Pred, C->arg_begin(), C->arg_end(), Dst);
885 break;
888 case Stmt::CXXConstructExprClass: {
889 const CXXConstructExpr *C = cast<CXXConstructExpr>(S);
890 // For block-level CXXConstructExpr, we don't have a destination region.
891 // Let VisitCXXConstructExpr() create one.
892 VisitCXXConstructExpr(C, 0, Pred, Dst);
893 break;
896 case Stmt::CXXMemberCallExprClass: {
897 const CXXMemberCallExpr *MCE = cast<CXXMemberCallExpr>(S);
898 VisitCXXMemberCallExpr(MCE, Pred, Dst);
899 break;
902 case Stmt::CXXOperatorCallExprClass: {
903 const CXXOperatorCallExpr *C = cast<CXXOperatorCallExpr>(S);
904 VisitCXXOperatorCallExpr(C, Pred, Dst);
905 break;
908 case Stmt::CXXNewExprClass: {
909 const CXXNewExpr *NE = cast<CXXNewExpr>(S);
910 VisitCXXNewExpr(NE, Pred, Dst);
911 break;
914 case Stmt::CXXDeleteExprClass: {
915 const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
916 VisitCXXDeleteExpr(CDE, Pred, Dst);
917 break;
919 // FIXME: ChooseExpr is really a constant. We need to fix the CFG so that
920 // it does not model ChooseExprs as explicit control-flow.
922 case Stmt::ChooseExprClass: { // __builtin_choose_expr
923 const ChooseExpr* C = cast<ChooseExpr>(S);
924 VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
925 break;
928 case Stmt::CompoundAssignOperatorClass:
929 VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
930 break;
932 case Stmt::CompoundLiteralExprClass:
933 VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
934 break;
936 case Stmt::ConditionalOperatorClass: { // '?' operator
937 const ConditionalOperator* C = cast<ConditionalOperator>(S);
938 VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
939 break;
942 case Stmt::CXXThisExprClass:
943 VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
944 break;
946 case Stmt::DeclRefExprClass: {
947 const DeclRefExpr *DE = cast<DeclRefExpr>(S);
948 VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
949 break;
952 case Stmt::DeclStmtClass:
953 VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
954 break;
956 case Stmt::ForStmtClass:
957 // This case isn't for branch processing, but for handling the
958 // initialization of a condition variable.
959 VisitCondInit(cast<ForStmt>(S)->getConditionVariable(), S, Pred, Dst);
960 break;
962 case Stmt::ImplicitCastExprClass:
963 case Stmt::CStyleCastExprClass:
964 case Stmt::CXXStaticCastExprClass:
965 case Stmt::CXXDynamicCastExprClass:
966 case Stmt::CXXReinterpretCastExprClass:
967 case Stmt::CXXConstCastExprClass:
968 case Stmt::CXXFunctionalCastExprClass: {
969 const CastExpr* C = cast<CastExpr>(S);
970 VisitCast(C, C->getSubExpr(), Pred, Dst);
971 break;
974 case Stmt::IfStmtClass:
975 // This case isn't for branch processing, but for handling the
976 // initialization of a condition variable.
977 VisitCondInit(cast<IfStmt>(S)->getConditionVariable(), S, Pred, Dst);
978 break;
980 case Stmt::InitListExprClass:
981 VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
982 break;
984 case Stmt::MemberExprClass:
985 VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
986 break;
987 case Stmt::ObjCIvarRefExprClass:
988 VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
989 break;
991 case Stmt::ObjCForCollectionStmtClass:
992 VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
993 break;
995 case Stmt::ObjCMessageExprClass:
996 VisitObjCMessageExpr(cast<ObjCMessageExpr>(S), Pred, Dst);
997 break;
999 case Stmt::ObjCAtThrowStmtClass: {
1000 // FIXME: This is not complete. We basically treat @throw as
1001 // an abort.
1002 SaveAndRestore<bool> OldSink(Builder->BuildSinks);
1003 Builder->BuildSinks = true;
1004 MakeNode(Dst, S, Pred, GetState(Pred));
1005 break;
1008 case Stmt::ReturnStmtClass:
1009 VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
1010 break;
1012 case Stmt::OffsetOfExprClass:
1013 VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
1014 break;
1016 case Stmt::SizeOfAlignOfExprClass:
1017 VisitSizeOfAlignOfExpr(cast<SizeOfAlignOfExpr>(S), Pred, Dst);
1018 break;
1020 case Stmt::StmtExprClass: {
1021 const StmtExpr* SE = cast<StmtExpr>(S);
1023 if (SE->getSubStmt()->body_empty()) {
1024 // Empty statement expression.
1025 assert(SE->getType() == getContext().VoidTy
1026 && "Empty statement expression must have void type.");
1027 Dst.Add(Pred);
1028 break;
1031 if (Expr* LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
1032 const GRState* state = GetState(Pred);
1033 MakeNode(Dst, SE, Pred, state->BindExpr(SE, state->getSVal(LastExpr)));
1035 else
1036 Dst.Add(Pred);
1038 break;
1041 case Stmt::StringLiteralClass: {
1042 const GRState* state = GetState(Pred);
1043 SVal V = state->getLValue(cast<StringLiteral>(S));
1044 MakeNode(Dst, S, Pred, state->BindExpr(S, V));
1045 return;
1048 case Stmt::SwitchStmtClass:
1049 // This case isn't for branch processing, but for handling the
1050 // initialization of a condition variable.
1051 VisitCondInit(cast<SwitchStmt>(S)->getConditionVariable(), S, Pred, Dst);
1052 break;
1054 case Stmt::UnaryOperatorClass: {
1055 const UnaryOperator *U = cast<UnaryOperator>(S);
1056 if (AMgr.shouldEagerlyAssume()&&(U->getOpcode() == UO_LNot)) {
1057 ExplodedNodeSet Tmp;
1058 VisitUnaryOperator(U, Pred, Tmp);
1059 evalEagerlyAssume(Dst, Tmp, U);
1061 else
1062 VisitUnaryOperator(U, Pred, Dst);
1063 break;
1066 case Stmt::WhileStmtClass:
1067 // This case isn't for branch processing, but for handling the
1068 // initialization of a condition variable.
1069 VisitCondInit(cast<WhileStmt>(S)->getConditionVariable(), S, Pred, Dst);
1070 break;
1074 //===----------------------------------------------------------------------===//
1075 // Block entrance. (Update counters).
1076 //===----------------------------------------------------------------------===//
1078 bool ExprEngine::ProcessBlockEntrance(const CFGBlock* B,
1079 const ExplodedNode *Pred,
1080 BlockCounter BC) {
1081 return BC.getNumVisited(Pred->getLocationContext()->getCurrentStackFrame(),
1082 B->getBlockID()) < AMgr.getMaxVisit();
1085 //===----------------------------------------------------------------------===//
1086 // Generic node creation.
1087 //===----------------------------------------------------------------------===//
1089 ExplodedNode* ExprEngine::MakeNode(ExplodedNodeSet& Dst, const Stmt* S,
1090 ExplodedNode* Pred, const GRState* St,
1091 ProgramPoint::Kind K, const void *tag) {
1092 assert (Builder && "StmtNodeBuilder not present.");
1093 SaveAndRestore<const void*> OldTag(Builder->Tag);
1094 Builder->Tag = tag;
1095 return Builder->MakeNode(Dst, S, Pred, St, K);
1098 //===----------------------------------------------------------------------===//
1099 // Branch processing.
1100 //===----------------------------------------------------------------------===//
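// MarkBranch - For short-circuit and conditional terminators, record which
// subexpression will provide the value of the whole expression by binding it
// as the data of a placeholder UndefinedVal.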
1102 const GRState* ExprEngine::MarkBranch(const GRState* state,
1103 const Stmt* Terminator,
1104 bool branchTaken) {
1106 switch (Terminator->getStmtClass()) {
1107 default:
1108 return state;
1110 case Stmt::BinaryOperatorClass: { // '&&' and '||'
1112 const BinaryOperator* B = cast<BinaryOperator>(Terminator);
1113 BinaryOperator::Opcode Op = B->getOpcode();
1115 assert (Op == BO_LAnd || Op == BO_LOr);
1117 // For &&, if we take the true branch, then the value of the whole
1118 // expression is that of the RHS expression.
1120 // For ||, if we take the false branch, then the value of the whole
1121 // expression is that of the RHS expression.
1123 const Expr* Ex = (Op == BO_LAnd && branchTaken) ||
1124 (Op == BO_LOr && !branchTaken)
1125 ? B->getRHS() : B->getLHS();
1127 return state->BindExpr(B, UndefinedVal(Ex));
1130 case Stmt::ConditionalOperatorClass: { // ?:
1132 const ConditionalOperator* C = cast<ConditionalOperator>(Terminator);
1134 // For ?:, if branchTaken == true then the value is either the LHS or,
1135 // when the LHS is omitted (GNU extension), the condition itself.
1137 const Expr* Ex;
1139 if (branchTaken)
1140 Ex = C->getLHS() ? C->getLHS() : C->getCond();
1141 else
1142 Ex = C->getRHS();
1144 return state->BindExpr(C, UndefinedVal(Ex));
1147 case Stmt::ChooseExprClass: { // __builtin_choose_expr
1149 const ChooseExpr* C = cast<ChooseExpr>(Terminator);
1151 const Expr* Ex = branchTaken ? C->getLHS() : C->getRHS();
1152 return state->BindExpr(C, UndefinedVal(Ex));
1157 /// RecoverCastedSymbol - A helper function for ProcessBranch that is used
1158 /// to try to recover some path-sensitivity for casts of symbolic
1159 /// integers that promote their values (which are currently not tracked well).
1160 /// This function returns the SVal bound to Condition->IgnoreCasts if all the
1161 /// cast(s) did was sign-extend the original value.
1162 static SVal RecoverCastedSymbol(GRStateManager& StateMgr, const GRState* state,
1163 const Stmt* Condition, ASTContext& Ctx) {
1165 const Expr *Ex = dyn_cast<Expr>(Condition);
1166 if (!Ex)
1167 return UnknownVal();
1169 uint64_t bits = 0;
1170 bool bitsInit = false;
1172 while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
1173 QualType T = CE->getType();
1175 if (!T->isIntegerType())
1176 return UnknownVal();
1178 uint64_t newBits = Ctx.getTypeSize(T);
1179 if (!bitsInit || newBits < bits) {
1180 bitsInit = true;
1181 bits = newBits;
1184 Ex = CE->getSubExpr();
1187 // We reached a non-cast. Is it a symbolic value?
1188 QualType T = Ex->getType();
1190 if (!bitsInit || !T->isIntegerType() || Ctx.getTypeSize(T) > bits)
1191 return UnknownVal();
1193 return state->getSVal(Ex);
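// ProcessBranch - Called by CoreEngine. Evaluate the branch condition and
// generate successor nodes for whichever of the true/false edges remain
// feasible.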
1196 void ExprEngine::ProcessBranch(const Stmt* Condition, const Stmt* Term,
1197 BranchNodeBuilder& builder) {
1199 // Check for NULL conditions; e.g. "for(;;)"
1200 if (!Condition) {
1201 builder.markInfeasible(false);
1202 return;
1205 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
1206 Condition->getLocStart(),
1207 "Error evaluating branch");
1209 for (CheckersOrdered::iterator I=Checkers.begin(),E=Checkers.end();I!=E;++I) {
1210 void *tag = I->first;
1211 Checker *checker = I->second;
1212 checker->VisitBranchCondition(builder, *this, Condition, tag);
1215 // If the checkers marked both branches infeasible (e.g. the condition is undefined), return.
1216 if (!builder.isFeasible(true) && !builder.isFeasible(false))
1217 return;
1219 const GRState* PrevState = builder.getState();
1220 SVal X = PrevState->getSVal(Condition);
1222 if (X.isUnknown()) {
1223 // Give it a chance to recover from unknown.
1224 if (const Expr *Ex = dyn_cast<Expr>(Condition)) {
1225 if (Ex->getType()->isIntegerType()) {
1226 // Try to recover some path-sensitivity. Right now casts of symbolic
1227 // integers that promote their values are currently not tracked well.
1228 // If 'Condition' is such an expression, try and recover the
1229 // underlying value and use that instead.
1230 SVal recovered = RecoverCastedSymbol(getStateManager(),
1231 builder.getState(), Condition,
1232 getContext());
1234 if (!recovered.isUnknown()) {
1235 X = recovered;
1239 // If the condition is still unknown, give up.
1240 if (X.isUnknown()) {
1241 builder.generateNode(MarkBranch(PrevState, Term, true), true);
1242 builder.generateNode(MarkBranch(PrevState, Term, false), false);
1243 return;
1247 DefinedSVal V = cast<DefinedSVal>(X);
1249 // Process the true branch.
1250 if (builder.isFeasible(true)) {
1251 if (const GRState *state = PrevState->assume(V, true))
1252 builder.generateNode(MarkBranch(state, Term, true), true);
1253 else
1254 builder.markInfeasible(true);
1257 // Process the false branch.
1258 if (builder.isFeasible(false)) {
1259 if (const GRState *state = PrevState->assume(V, false))
1260 builder.generateNode(MarkBranch(state, Term, false), false);
1261 else
1262 builder.markInfeasible(false);
1266 /// ProcessIndirectGoto - Called by CoreEngine. Used to generate successor
1267 /// nodes by processing the 'effects' of a computed goto jump.
1268 void ExprEngine::ProcessIndirectGoto(IndirectGotoNodeBuilder& builder) {
1270 const GRState *state = builder.getState();
1271 SVal V = state->getSVal(builder.getTarget());
1273 // Three possibilities:
1275 // (1) We know the computed label.
1276 // (2) The label is NULL (or some other constant), or Undefined.
1277 // (3) We have no clue about the label. Dispatch to all targets.
1280 typedef IndirectGotoNodeBuilder::iterator iterator;
1282 if (isa<loc::GotoLabel>(V)) {
1283 const LabelStmt* L = cast<loc::GotoLabel>(V).getLabel();
1285 for (iterator I=builder.begin(), E=builder.end(); I != E; ++I) {
1286 if (I.getLabel() == L) {
1287 builder.generateNode(I, state);
1288 return;
1292 assert (false && "No block with label.");
1293 return;
1296 if (isa<loc::ConcreteInt>(V) || isa<UndefinedVal>(V)) {
1297 // Dispatch to the first target and mark it as a sink.
1298 //ExplodedNode* N = builder.generateNode(builder.begin(), state, true);
1299 // FIXME: add checker visit.
1300 // UndefBranches.insert(N);
1301 return;
1304 // This is really a catch-all. We don't support symbolics yet.
1305 // FIXME: Implement dispatch for symbolic pointers.
1307 for (iterator I=builder.begin(), E=builder.end(); I != E; ++I)
1308 builder.generateNode(I, state);
1312 void ExprEngine::VisitGuardedExpr(const Expr* Ex, const Expr* L,
1313 const Expr* R,
1314 ExplodedNode* Pred, ExplodedNodeSet& Dst) {
1316 assert(Ex == currentStmt &&
1317 Pred->getLocationContext()->getCFG()->isBlkExpr(Ex));
1319 const GRState* state = GetState(Pred);
1320 SVal X = state->getSVal(Ex);
1322 assert (X.isUndef());
1324 const Expr *SE = (Expr*) cast<UndefinedVal>(X).getData();
1325 assert(SE);
1326 X = state->getSVal(SE);
1328 // Make sure that we invalidate the previous binding.
1329 MakeNode(Dst, Ex, Pred, state->BindExpr(Ex, X, true));
1332 /// ProcessEndPath - Called by CoreEngine. Used to generate end-of-path
1333 /// nodes when the control reaches the end of a function.
1334 void ExprEngine::ProcessEndPath(EndPathNodeBuilder& builder) {
1335 getTF().evalEndPath(*this, builder);
1336 StateMgr.EndPath(builder.getState());
1337 for (CheckersOrdered::iterator I=Checkers.begin(),E=Checkers.end(); I!=E;++I){
1338 void *tag = I->first;
1339 Checker *checker = I->second;
1340 checker->evalEndPath(builder, tag, *this);
1344 /// ProcessSwitch - Called by CoreEngine. Used to generate successor
1345 /// nodes by processing the 'effects' of a switch statement.
1346 void ExprEngine::ProcessSwitch(SwitchNodeBuilder& builder) {
1347 typedef SwitchNodeBuilder::iterator iterator;
1348 const GRState* state = builder.getState();
1349 const Expr* CondE = builder.getCondition();
1350 SVal CondV_untested = state->getSVal(CondE);
1352 if (CondV_untested.isUndef()) {
1353 //ExplodedNode* N = builder.generateDefaultCaseNode(state, true);
1354 // FIXME: add checker
1355 //UndefBranches.insert(N);
1357 return;
1359 DefinedOrUnknownSVal CondV = cast<DefinedOrUnknownSVal>(CondV_untested);
1361 const GRState *DefaultSt = state;
1363 iterator I = builder.begin(), EI = builder.end();
1364 bool defaultIsFeasible = I == EI;
1366 for ( ; I != EI; ++I) {
1367 const CaseStmt* Case = I.getCase();
1369 // Evaluate the LHS of the case value.
1370 Expr::EvalResult V1;
1371 bool b = Case->getLHS()->Evaluate(V1, getContext());
1373 // Sanity checks. These go away in Release builds.
1374 assert(b && V1.Val.isInt() && !V1.HasSideEffects
1375 && "Case condition must evaluate to an integer constant.");
1376 b = b; // silence unused variable warning
1377 assert(V1.Val.getInt().getBitWidth() ==
1378 getContext().getTypeSize(CondE->getType()));
1380 // Get the RHS of the case, if it exists.
1381 Expr::EvalResult V2;
1383 if (const Expr* E = Case->getRHS()) {
1384 b = E->Evaluate(V2, getContext());
1385 assert(b && V2.Val.isInt() && !V2.HasSideEffects
1386 && "Case condition must evaluate to an integer constant.");
1387 b = b; // silence unused variable warning
1389 else
1390 V2 = V1;
1392 // FIXME: Eventually we should replace the logic below with a range
1393 // comparison, rather than concretizing the values within the range.
1394 // This should be easy once we have "ranges" for NonLVals.
1396 do {
1397 nonloc::ConcreteInt CaseVal(getBasicVals().getValue(V1.Val.getInt()));
1398 DefinedOrUnknownSVal Res = svalBuilder.evalEQ(DefaultSt ? DefaultSt : state,
1399 CondV, CaseVal);
1401 // Now "assume" that the case matches.
1402 if (const GRState* stateNew = state->assume(Res, true)) {
1403 builder.generateCaseStmtNode(I, stateNew);
1405 // If CondV evaluates to a constant, then we know that this
1406 // is the *only* case that we can take, so stop evaluating the
1407 // others.
1408 if (isa<nonloc::ConcreteInt>(CondV))
1409 return;
1412 // Now "assume" that the case doesn't match. Add this state
1413 // to the default state (if it is feasible).
1414 if (DefaultSt) {
1415 if (const GRState *stateNew = DefaultSt->assume(Res, false)) {
1416 defaultIsFeasible = true;
1417 DefaultSt = stateNew;
1419 else {
1420 defaultIsFeasible = false;
1421 DefaultSt = NULL;
1425 // Concretize the next value in the range.
1426 if (V1.Val.getInt() == V2.Val.getInt())
1427 break;
1429 ++V1.Val.getInt();
1430 assert (V1.Val.getInt() <= V2.Val.getInt());
1432 } while (true);
1435 if (!defaultIsFeasible)
1436 return;
1438 // If we have switch(enum value), the default branch is not
1439 // feasible if all of the enum constants not covered by 'case:' statements
1440 // are not feasible values for the switch condition.
1442 // Note that this isn't as accurate as it could be. Even if there isn't
1443 // a case for a particular enum value, as long as that enum value isn't a
1444 // feasible value of the condition it shouldn't make 'default:' reachable.
1445 const SwitchStmt *SS = builder.getSwitch();
1446 const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts();
1447 if (CondExpr->getType()->getAs<EnumType>()) {
1448 if (SS->isAllEnumCasesCovered())
1449 return;
1452 builder.generateDefaultCaseNode(DefaultSt);
1455 void ExprEngine::ProcessCallEnter(CallEnterNodeBuilder &B) {
1456 const GRState *state = B.getState()->EnterStackFrame(B.getCalleeContext());
1457 B.generateNode(state);
1460 void ExprEngine::ProcessCallExit(CallExitNodeBuilder &B) {
1461 const GRState *state = B.getState();
1462 const ExplodedNode *Pred = B.getPredecessor();
1463 const StackFrameContext *calleeCtx =
1464 cast<StackFrameContext>(Pred->getLocationContext());
1465 const Stmt *CE = calleeCtx->getCallSite();
1467 // If the callee returns an expression, bind its value to CallExpr.
1468 const Stmt *ReturnedExpr = state->get<ReturnExpr>();
1469 if (ReturnedExpr) {
1470 SVal RetVal = state->getSVal(ReturnedExpr);
1471 state = state->BindExpr(CE, RetVal);
1472 // Clear the return expr GDM.
1473 state = state->remove<ReturnExpr>();
1476 // Bind the constructed object value to CXXConstructExpr.
1477 if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
1478 const CXXThisRegion *ThisR =
1479 getCXXThisRegion(CCE->getConstructor()->getParent(), calleeCtx);
1481 SVal ThisV = state->getSVal(ThisR);
1482 // Always bind the region to the CXXConstructExpr.
1483 state = state->BindExpr(CCE, ThisV);
1486 B.generateNode(state);
1489 //===----------------------------------------------------------------------===//
1490 // Transfer functions: logical operations ('&&', '||').
1491 //===----------------------------------------------------------------------===//
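// VisitLogicalExpr - Compute the value of a block-level '&&' or '||'
// expression from the operand chosen by MarkBranch.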
1493 void ExprEngine::VisitLogicalExpr(const BinaryOperator* B, ExplodedNode* Pred,
1494 ExplodedNodeSet& Dst) {
1496 assert(B->getOpcode() == BO_LAnd ||
1497 B->getOpcode() == BO_LOr);
1499 assert(B==currentStmt && Pred->getLocationContext()->getCFG()->isBlkExpr(B));
1501 const GRState* state = GetState(Pred);
1502 SVal X = state->getSVal(B);
1503 assert(X.isUndef());
1505 const Expr *Ex = (const Expr*) cast<UndefinedVal>(X).getData();
1506 assert(Ex);
1508 if (Ex == B->getRHS()) {
1509 X = state->getSVal(Ex);
1511 // Handle undefined values.
1512 if (X.isUndef()) {
1513 MakeNode(Dst, B, Pred, state->BindExpr(B, X));
1514 return;
1517 DefinedOrUnknownSVal XD = cast<DefinedOrUnknownSVal>(X);
1519 // We took the RHS. Because the value of the '&&' or '||' expression must
1520 // evaluate to 0 or 1, we must assume the value of the RHS evaluates to 0
1521 // or 1. Alternatively, we could take a lazy approach, and calculate this
1522 // value later when necessary. We don't have the machinery in place for
1523 // this right now, and since most logical expressions are used for branches,
1524 // the payoff is not likely to be large. Instead, we do eager evaluation.
1525 if (const GRState *newState = state->assume(XD, true))
1526 MakeNode(Dst, B, Pred,
1527 newState->BindExpr(B, svalBuilder.makeIntVal(1U, B->getType())));
1529 if (const GRState *newState = state->assume(XD, false))
1530 MakeNode(Dst, B, Pred,
1531 newState->BindExpr(B, svalBuilder.makeIntVal(0U, B->getType())));
1533 else {
1534 // We took the LHS expression. Depending on whether we are '&&' or
1535 // '||' we know what the value of the expression is via properties of
1536 // the short-circuiting.
1537 X = svalBuilder.makeIntVal(B->getOpcode() == BO_LAnd ? 0U : 1U,
1538 B->getType());
1539 MakeNode(Dst, B, Pred, state->BindExpr(B, X));
1543 //===----------------------------------------------------------------------===//
1544 // Transfer functions: Loads and stores.
1545 //===----------------------------------------------------------------------===//
1547 void ExprEngine::VisitBlockExpr(const BlockExpr *BE, ExplodedNode *Pred,
1548 ExplodedNodeSet &Dst) {
1550 ExplodedNodeSet Tmp;
1552 CanQualType T = getContext().getCanonicalType(BE->getType());
1553 SVal V = svalBuilder.getBlockPointer(BE->getBlockDecl(), T,
1554 Pred->getLocationContext());
1556 MakeNode(Tmp, BE, Pred, GetState(Pred)->BindExpr(BE, V),
1557 ProgramPoint::PostLValueKind);
1559 // Post-visit the BlockExpr.
1560 CheckerVisit(BE, Dst, Tmp, PostVisitStmtCallback);
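// VisitCommonDeclRefExpr - Compute the value of a reference to a variable,
// enum constant, or function.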
1563 void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D,
1564 ExplodedNode *Pred,
1565 ExplodedNodeSet &Dst) {
1566 const GRState *state = GetState(Pred);
1568 if (const VarDecl* VD = dyn_cast<VarDecl>(D)) {
1569 assert(Ex->isLValue());
1570 SVal V = state->getLValue(VD, Pred->getLocationContext());
1572 // For references, the 'lvalue' is the pointer address stored in the
1573 // reference region.
1574 if (VD->getType()->isReferenceType()) {
1575 if (const MemRegion *R = V.getAsRegion())
1576 V = state->getSVal(R);
1577 else
1578 V = UnknownVal();
1581 MakeNode(Dst, Ex, Pred, state->BindExpr(Ex, V),
1582 ProgramPoint::PostLValueKind);
1583 return;
1585 if (const EnumConstantDecl* ED = dyn_cast<EnumConstantDecl>(D)) {
1586 assert(!Ex->isLValue());
1587 SVal V = svalBuilder.makeIntVal(ED->getInitVal());
1588 MakeNode(Dst, Ex, Pred, state->BindExpr(Ex, V));
1589 return;
1591 if (const FunctionDecl* FD = dyn_cast<FunctionDecl>(D)) {
1592 SVal V = svalBuilder.getFunctionPointer(FD);
1593 MakeNode(Dst, Ex, Pred, state->BindExpr(Ex, V),
1594 ProgramPoint::PostLValueKind);
1595 return;
1597 assert (false &&
1598 "ValueDecl support for this ValueDecl not implemented.");
1601 /// VisitLvalArraySubscriptExpr - Transfer function for array accesses
1602 void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr* A,
1603 ExplodedNode* Pred,
1604 ExplodedNodeSet& Dst){
1606 const Expr* Base = A->getBase()->IgnoreParens();
1607 const Expr* Idx = A->getIdx()->IgnoreParens();
1609 // Evaluate the base.
1610 ExplodedNodeSet Tmp;
1611 Visit(Base, Pred, Tmp);
1613 for (ExplodedNodeSet::iterator I1=Tmp.begin(), E1=Tmp.end(); I1!=E1; ++I1) {
1614 ExplodedNodeSet Tmp2;
1615 Visit(Idx, *I1, Tmp2); // Evaluate the index.
1616 ExplodedNodeSet Tmp3;
1617 CheckerVisit(A, Tmp3, Tmp2, PreVisitStmtCallback);
1619 for (ExplodedNodeSet::iterator I2=Tmp3.begin(),E2=Tmp3.end();I2!=E2; ++I2) {
1620 const GRState* state = GetState(*I2);
1621 SVal V = state->getLValue(A->getType(), state->getSVal(Idx),
1622 state->getSVal(Base));
1623 assert(A->isLValue());
1624 MakeNode(Dst, A, *I2, state->BindExpr(A, V), ProgramPoint::PostLValueKind);
1629 /// VisitMemberExpr - Transfer function for member expressions.
1630 void ExprEngine::VisitMemberExpr(const MemberExpr* M, ExplodedNode* Pred,
1631 ExplodedNodeSet& Dst) {
1633 Expr *baseExpr = M->getBase()->IgnoreParens();
1634 ExplodedNodeSet dstBase;
1635 Visit(baseExpr, Pred, dstBase);
1637 FieldDecl *field = dyn_cast<FieldDecl>(M->getMemberDecl());
1638 if (!field) // FIXME: skipping member expressions for non-fields
1639 return;
1641 for (ExplodedNodeSet::iterator I = dstBase.begin(), E = dstBase.end();
1642 I != E; ++I) {
1643 const GRState* state = GetState(*I);
1644 SVal baseExprVal = state->getSVal(baseExpr);
1645 if (isa<nonloc::LazyCompoundVal>(baseExprVal) ||
1646 isa<nonloc::CompoundVal>(baseExprVal)) {
1647 MakeNode(Dst, M, *I, state->BindExpr(M, UnknownVal()));
1648 continue;
1651 // FIXME: Should we insert some assumption logic in here to determine
1652 // if "Base" is a valid piece of memory? Previously this assumption was
1653 // added when using FieldOffset lvals (which we no longer have).
1655 // For all other cases, compute an lvalue.
1656 SVal L = state->getLValue(field, baseExprVal);
1657 if (M->isLValue())
1658 MakeNode(Dst, M, *I, state->BindExpr(M, L), ProgramPoint::PostLValueKind);
1659 else
1660 evalLoad(Dst, M, *I, state, L);
1664 /// evalBind - Handle the semantics of binding a value to a specific location.
1665 /// This method is used by evalStore and (soon) VisitDeclStmt, and others.
1666 void ExprEngine::evalBind(ExplodedNodeSet& Dst, const Stmt* StoreE,
1667 ExplodedNode* Pred, const GRState* state,
1668 SVal location, SVal Val, bool atDeclInit) {
1671 // Do a previsit of the bind.
1672 ExplodedNodeSet CheckedSet, Src;
1673 Src.Add(Pred);
1674 CheckerVisitBind(StoreE, CheckedSet, Src, location, Val, true);
1676 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
1677 I!=E; ++I) {
1679 if (Pred != *I)
1680 state = GetState(*I);
1682 const GRState* newState = 0;
1684 if (atDeclInit) {
1685 const VarRegion *VR =
1686 cast<VarRegion>(cast<loc::MemRegionVal>(location).getRegion());
1688 newState = state->bindDecl(VR, Val);
1690 else {
1691 if (location.isUnknown()) {
1692 // We know that the new state will be the same as the old state since
1693 // the location of the binding is "unknown". Consequently, there
1694 // is no reason to create a new state.
1695 newState = state;
1697 else {
1698 // We are binding to a value other than 'unknown'. Perform the binding
1699 // using the StoreManager.
1700 newState = state->bindLoc(cast<Loc>(location), Val);
1704 // The next thing to do is check if the TransferFuncs object wants to
1705 // update the state based on the new binding. If the GRTransferFunc object
1706 // doesn't do anything, just auto-propagate the current state.
1708 // NOTE: We use 'AssignE' for the location of the PostStore if 'AssignE'
1709 // is non-NULL. Checkers typically care about
1711 StmtNodeBuilderRef BuilderRef(Dst, *Builder, *this, *I, newState, StoreE,
1712 true);
1714 getTF().evalBind(BuilderRef, location, Val);
1718 /// evalStore - Handle the semantics of a store via an assignment.
1719 /// @param Dst The node set to store generated state nodes
1720 /// @param AssignE The assignment expression if the store happens in an
1721 /// assignment.
1722 /// @param LocationE The location expression that is stored to.
1723 /// @param state The current simulation state
1724 /// @param location The location to store the value
1725 /// @param Val The value to be stored
1726 void ExprEngine::evalStore(ExplodedNodeSet& Dst, const Expr *AssignE,
1727 const Expr* LocationE,
1728 ExplodedNode* Pred,
1729 const GRState* state, SVal location, SVal Val,
1730 const void *tag) {
1732 assert(Builder && "StmtNodeBuilder must be defined.");
1734 // Evaluate the location (checks for bad dereferences).
1735 ExplodedNodeSet Tmp;
1736 evalLocation(Tmp, LocationE, Pred, state, location, tag, false);
1738 if (Tmp.empty())
1739 return;
1741 assert(!location.isUndef());
1743 SaveAndRestore<ProgramPoint::Kind> OldSPointKind(Builder->PointKind,
1744 ProgramPoint::PostStoreKind);
1745 SaveAndRestore<const void*> OldTag(Builder->Tag, tag);
1747 // Proceed with the store. We use AssignE as the anchor for the PostStore
1748 // ProgramPoint if it is non-NULL, and LocationE otherwise.
1749 const Expr *StoreE = AssignE ? AssignE : LocationE;
1751 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI)
1752 evalBind(Dst, StoreE, *NI, GetState(*NI), location, Val);
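// evalLoad - Model a load from 'location'; loads through a C++ reference are
// modeled as two loads (the reference itself, then the referenced value).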
1755 void ExprEngine::evalLoad(ExplodedNodeSet& Dst, const Expr *Ex,
1756 ExplodedNode* Pred,
1757 const GRState* state, SVal location,
1758 const void *tag, QualType LoadTy) {
1759 assert(!isa<NonLoc>(location) && "location cannot be a NonLoc.");
1761 // Are we loading through a reference? This actually results in two loads; one
1762 // to fetch the address of the referenced value and one to fetch the
1763 // referenced value.
1764 if (const TypedRegion *TR =
1765 dyn_cast_or_null<TypedRegion>(location.getAsRegion())) {
1767 QualType ValTy = TR->getValueType();
1768 if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) {
1769 static int loadReferenceTag = 0;
1770 ExplodedNodeSet Tmp;
1771 evalLoadCommon(Tmp, Ex, Pred, state, location, &loadReferenceTag,
1772 getContext().getPointerType(RT->getPointeeType()));
1774 // Perform the load from the referenced value.
1775 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end() ; I!=E; ++I) {
1776 state = GetState(*I);
1777 location = state->getSVal(Ex);
1778 evalLoadCommon(Dst, Ex, *I, state, location, tag, LoadTy);
1780 return;
1784 evalLoadCommon(Dst, Ex, Pred, state, location, tag, LoadTy);
1787 void ExprEngine::evalLoadCommon(ExplodedNodeSet& Dst, const Expr *Ex,
1788 ExplodedNode* Pred,
1789 const GRState* state, SVal location,
1790 const void *tag, QualType LoadTy) {
1792 // Evaluate the location (checks for bad dereferences).
1793 ExplodedNodeSet Tmp;
1794 evalLocation(Tmp, Ex, Pred, state, location, tag, true);
1796 if (Tmp.empty())
1797 return;
1799 assert(!location.isUndef());
1801 SaveAndRestore<ProgramPoint::Kind> OldSPointKind(Builder->PointKind);
1802 SaveAndRestore<const void*> OldTag(Builder->Tag);
1804 // Proceed with the load.
1805 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) {
1806 state = GetState(*NI);
1808 if (location.isUnknown()) {
1809 // This is important. We must nuke the old binding.
1810 MakeNode(Dst, Ex, *NI, state->BindExpr(Ex, UnknownVal()),
1811 ProgramPoint::PostLoadKind, tag);
1813 else {
1814 if (LoadTy.isNull())
1815 LoadTy = Ex->getType();
1816 SVal V = state->getSVal(cast<Loc>(location), LoadTy);
1817 MakeNode(Dst, Ex, *NI, state->bindExprAndLocation(Ex, location, V),
1818 ProgramPoint::PostLoadKind, tag);
1823 void ExprEngine::evalLocation(ExplodedNodeSet &Dst, const Stmt *S,
1824 ExplodedNode* Pred,
1825 const GRState* state, SVal location,
1826 const void *tag, bool isLoad) {
1827 // Early checks for performance reasons.
1828 if (location.isUnknown() || Checkers.empty()) {
1829 Dst.Add(Pred);
1830 return;
1833 ExplodedNodeSet Src, Tmp;
1834 Src.Add(Pred);
1835 ExplodedNodeSet *PrevSet = &Src;
1837 for (CheckersOrdered::iterator I=Checkers.begin(),E=Checkers.end(); I!=E; ++I)
1839 ExplodedNodeSet *CurrSet = 0;
1840 if (I+1 == E)
1841 CurrSet = &Dst;
1842 else {
1843 CurrSet = (PrevSet == &Tmp) ? &Src : &Tmp;
1844 CurrSet->clear();
1847 void *tag = I->first;
1848 Checker *checker = I->second;
1850 for (ExplodedNodeSet::iterator NI = PrevSet->begin(), NE = PrevSet->end();
1851 NI != NE; ++NI) {
1852 // Use the 'state' argument only when the predecessor node is the
1853 // same as Pred. This allows us to catch updates to the state.
1854 checker->GR_visitLocation(*CurrSet, *Builder, *this, S, *NI,
1855 *NI == Pred ? state : GetState(*NI),
1856 location, tag, isLoad);
1859 // Update which NodeSet is the current one.
1860 PrevSet = CurrSet;
1864 bool ExprEngine::InlineCall(ExplodedNodeSet &Dst, const CallExpr *CE,
1865 ExplodedNode *Pred) {
1866 const GRState *state = GetState(Pred);
1867 const Expr *Callee = CE->getCallee();
1868 SVal L = state->getSVal(Callee);
1870 const FunctionDecl *FD = L.getAsFunctionDecl();
1871 if (!FD)
1872 return false;
1874 // Check if the function definition is in the same translation unit.
1875 if (FD->hasBody(FD)) {
1876 const StackFrameContext *stackFrame =
1877 AMgr.getStackFrame(AMgr.getAnalysisContext(FD),
1878 Pred->getLocationContext(),
1879 CE, Builder->getBlock(), Builder->getIndex());
1880 // Now that we have the definition of the callee, create a CallEnter node.
1881 CallEnter Loc(CE, stackFrame, Pred->getLocationContext());
1883 ExplodedNode *N = Builder->generateNode(Loc, state, Pred);
1884 Dst.Add(N);
1885 return true;
1888 // Check if we can find the function definition in other translation units.
1889 if (AMgr.hasIndexer()) {
1890 AnalysisContext *C = AMgr.getAnalysisContextInAnotherTU(FD);
1891 if (C == 0)
1892 return false;
1893 const StackFrameContext *stackFrame =
1894 AMgr.getStackFrame(C, Pred->getLocationContext(),
1895 CE, Builder->getBlock(), Builder->getIndex());
1896 CallEnter Loc(CE, stackFrame, Pred->getLocationContext());
1897 ExplodedNode *N = Builder->generateNode(Loc, state, Pred);
1898 Dst.Add(N);
1899 return true;
1902 return false;
1905 void ExprEngine::VisitCall(const CallExpr* CE, ExplodedNode* Pred,
1906 CallExpr::const_arg_iterator AI,
1907 CallExpr::const_arg_iterator AE,
1908 ExplodedNodeSet& Dst) {
1910 // Determine the type of function we're calling (if available).
1911 const FunctionProtoType *Proto = NULL;
1912 QualType FnType = CE->getCallee()->IgnoreParens()->getType();
1913 if (const PointerType *FnTypePtr = FnType->getAs<PointerType>())
1914 Proto = FnTypePtr->getPointeeType()->getAs<FunctionProtoType>();
1916 // Evaluate the arguments.
1917 ExplodedNodeSet ArgsEvaluated;
1918 evalArguments(CE->arg_begin(), CE->arg_end(), Proto, Pred, ArgsEvaluated);
1920 // Now process the call itself.
1921 ExplodedNodeSet DstTmp;
1922 const Expr* Callee = CE->getCallee()->IgnoreParens();
1924 for (ExplodedNodeSet::iterator NI=ArgsEvaluated.begin(),
1925 NE=ArgsEvaluated.end(); NI != NE; ++NI) {
1926 // Evaluate the callee.
1927 ExplodedNodeSet DstTmp2;
1928 Visit(Callee, *NI, DstTmp2);
1929 // Perform the previsit of the CallExpr, storing the results in DstTmp.
1930 CheckerVisit(CE, DstTmp, DstTmp2, PreVisitStmtCallback);
1933 // Finally, evaluate the function call. We try each of the checkers
1934 // to see if they can evaluate the function call.
1935 ExplodedNodeSet DstTmp3;
1937 for (ExplodedNodeSet::iterator DI = DstTmp.begin(), DE = DstTmp.end();
1938 DI != DE; ++DI) {
1940 const GRState* state = GetState(*DI);
1941 SVal L = state->getSVal(Callee);
1943 // FIXME: Add support for symbolic function calls (calls involving
1944 // function pointer values that are symbolic).
1945 SaveAndRestore<bool> OldSink(Builder->BuildSinks);
1946 ExplodedNodeSet DstChecker;
1948 // If the callee is processed by a checker, skip the rest of the logic.
1949 if (CheckerEvalCall(CE, DstChecker, *DI))
1950 DstTmp3.insert(DstChecker);
1951 else if (AMgr.shouldInlineCall() && InlineCall(Dst, CE, *DI)) {
1952 // Callee is inlined. We shouldn't do post-call checking.
1953 return;
1955 else {
1956 for (ExplodedNodeSet::iterator DI_Checker = DstChecker.begin(),
1957 DE_Checker = DstChecker.end();
1958 DI_Checker != DE_Checker; ++DI_Checker) {
1960 // Dispatch to the plug-in transfer function.
1961 unsigned oldSize = DstTmp3.size();
1962 SaveOr OldHasGen(Builder->HasGeneratedNode);
1963 Pred = *DI_Checker;
1965 // Dispatch to transfer function logic to handle the call itself.
1966 // FIXME: Allow us to chain together transfer functions.
1967 assert(Builder && "StmtNodeBuilder must be defined.");
1968 getTF().evalCall(DstTmp3, *this, *Builder, CE, L, Pred);
1970 // Handle the case where no nodes were generated. Auto-generate a node that
1971 // contains the updated state if we aren't generating sinks.
1972 if (!Builder->BuildSinks && DstTmp3.size() == oldSize &&
1973 !Builder->HasGeneratedNode)
1974 MakeNode(DstTmp3, CE, Pred, state);
1979 // Finally, perform the post-condition check of the CallExpr and store
1980 // the created nodes in 'Dst'.
1981 CheckerVisit(CE, Dst, DstTmp3, PostVisitStmtCallback);
1984 //===----------------------------------------------------------------------===//
1985 // Transfer function: eagerly assumed expressions.
1986 //===----------------------------------------------------------------------===//
1988 static std::pair<const void*,const void*> EagerlyAssumeTag
1989 = std::pair<const void*,const void*>(&EagerlyAssumeTag,static_cast<void*>(0));
1991 void ExprEngine::evalEagerlyAssume(ExplodedNodeSet &Dst, ExplodedNodeSet &Src,
1992 const Expr *Ex) {
1993 for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) {
1994 ExplodedNode *Pred = *I;
1996 // Test if the previous node was at the same expression. This can happen
1997 // when the expression fails to evaluate to anything meaningful and
1998 // (as an optimization) we don't generate a node.
1999 ProgramPoint P = Pred->getLocation();
2000 if (!isa<PostStmt>(P) || cast<PostStmt>(P).getStmt() != Ex) {
2001 Dst.Add(Pred);
2002 continue;
2005 const GRState* state = GetState(Pred);
2006 SVal V = state->getSVal(Ex);
2007 if (nonloc::SymExprVal *SEV = dyn_cast<nonloc::SymExprVal>(&V)) {
2008 // First assume that the condition is true.
2009 if (const GRState *stateTrue = state->assume(*SEV, true)) {
2010 stateTrue = stateTrue->BindExpr(Ex,
2011 svalBuilder.makeIntVal(1U, Ex->getType()));
2012 Dst.Add(Builder->generateNode(PostStmtCustom(Ex,
2013 &EagerlyAssumeTag, Pred->getLocationContext()),
2014 stateTrue, Pred));
2017 // Next, assume that the condition is false.
2018 if (const GRState *stateFalse = state->assume(*SEV, false)) {
2019 stateFalse = stateFalse->BindExpr(Ex,
2020 svalBuilder.makeIntVal(0U, Ex->getType()));
2021 Dst.Add(Builder->generateNode(PostStmtCustom(Ex, &EagerlyAssumeTag,
2022 Pred->getLocationContext()),
2023 stateFalse, Pred));
2026 else
2027 Dst.Add(Pred);
2031 //===----------------------------------------------------------------------===//
2032 // Transfer function: Objective-C @synchronized.
2033 //===----------------------------------------------------------------------===//
2035 void ExprEngine::VisitObjCAtSynchronizedStmt(const ObjCAtSynchronizedStmt *S,
2036 ExplodedNode *Pred,
2037 ExplodedNodeSet &Dst) {
2039 // The mutex expression is a CFGElement, so we don't need to explicitly
2040 // visit it since it will already be processed.
2042 // Pre-visit the ObjCAtSynchronizedStmt.
2043 ExplodedNodeSet Tmp;
2044 Tmp.Add(Pred);
2045 CheckerVisit(S, Dst, Tmp, PreVisitStmtCallback);
2048 //===----------------------------------------------------------------------===//
2049 // Transfer function: Objective-C ivar references.
2050 //===----------------------------------------------------------------------===//
2052 void ExprEngine::VisitLvalObjCIvarRefExpr(const ObjCIvarRefExpr* Ex,
2053 ExplodedNode* Pred,
2054 ExplodedNodeSet& Dst) {
2056 // Visit the base expression, which is needed for computing the lvalue
2057 // of the ivar.
2058 ExplodedNodeSet dstBase;
2059 const Expr *baseExpr = Ex->getBase();
2060 Visit(baseExpr, Pred, dstBase);
2062 // Using the base, compute the lvalue of the instance variable.
2063 for (ExplodedNodeSet::iterator I = dstBase.begin(), E = dstBase.end();
2064 I!=E; ++I) {
2065 ExplodedNode *nodeBase = *I;
2066 const GRState *state = GetState(nodeBase);
2067 SVal baseVal = state->getSVal(baseExpr);
2068 SVal location = state->getLValue(Ex->getDecl(), baseVal);
2069 MakeNode(Dst, Ex, *I, state->BindExpr(Ex, location));
2073 //===----------------------------------------------------------------------===//
2074 // Transfer function: Objective-C fast enumeration 'for' statements.
2075 //===----------------------------------------------------------------------===//
2077 void ExprEngine::VisitObjCForCollectionStmt(const ObjCForCollectionStmt* S,
2078 ExplodedNode* Pred, ExplodedNodeSet& Dst) {
2080 // ObjCForCollectionStmts are processed in two places. This method
2081 // handles the case where an ObjCForCollectionStmt* occurs as one of the
2082 // statements within a basic block. This transfer function does two things:
2084 // (1) binds the next container value to 'element'. This creates a new
2085 // node in the ExplodedGraph.
2087 // (2) binds the value 0/1 to the ObjCForCollectionStmt* itself, indicating
2088 // whether or not the container has any more elements. This value
2089 // will be tested in ProcessBranch. We need to explicitly bind
2090 // this value because a container can contain nil elements.
2092 // FIXME: Eventually this logic should actually do dispatches to
2093 // 'countByEnumeratingWithState:objects:count:' (NSFastEnumeration).
2094 // This will require simulating a temporary NSFastEnumerationState, either
2095 // through an SVal or through the use of MemRegions. This value can
2096 // be affixed to the ObjCForCollectionStmt* instead of 0/1; when the loop
2097 // terminates we reclaim the temporary (it goes out of scope) and we can
2098 // test if the SVal is 0 or if the MemRegion is null (depending
2099 // on what approach we take).
2101 // For now: simulate (1) by binding 'element' to either a conjured symbol (the
2102 // container has more elements) or nil (the container is empty). Thus this
2103 // transfer function will by default result in state splitting.
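// Illustrative example: for 'for (id x in collection) { ... }' one successor
// state binds 'x' to a conjured symbol and the statement to 1 (more elements),
// while the other binds 'x' to nil and the statement to 0 (no more elements).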
2105 const Stmt* elem = S->getElement();
2106 SVal ElementV;
2108 if (const DeclStmt* DS = dyn_cast<DeclStmt>(elem)) {
2109 const VarDecl* ElemD = cast<VarDecl>(DS->getSingleDecl());
2110 assert (ElemD->getInit() == 0);
2111 ElementV = GetState(Pred)->getLValue(ElemD, Pred->getLocationContext());
2112 VisitObjCForCollectionStmtAux(S, Pred, Dst, ElementV);
2113 return;
2116 ExplodedNodeSet Tmp;
2117 Visit(cast<Expr>(elem), Pred, Tmp);
2118 for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I!=E; ++I) {
2119 const GRState* state = GetState(*I);
2120 VisitObjCForCollectionStmtAux(S, *I, Dst, state->getSVal(elem));
2124 void ExprEngine::VisitObjCForCollectionStmtAux(const ObjCForCollectionStmt* S,
2125 ExplodedNode* Pred, ExplodedNodeSet& Dst,
2126 SVal ElementV) {
2128 // Check if the location we are writing back to is a null pointer.
2129 const Stmt* elem = S->getElement();
2130 ExplodedNodeSet Tmp;
2131 evalLocation(Tmp, elem, Pred, GetState(Pred), ElementV, NULL, false);
2133 if (Tmp.empty())
2134 return;
2136 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) {
2137 Pred = *NI;
2138 const GRState *state = GetState(Pred);
2140 // Handle the case where the container still has elements.
2141 SVal TrueV = svalBuilder.makeTruthVal(1);
2142 const GRState *hasElems = state->BindExpr(S, TrueV);
2144 // Handle the case where the container has no elements.
2145 SVal FalseV = svalBuilder.makeTruthVal(0);
2146 const GRState *noElems = state->BindExpr(S, FalseV);
2148 if (loc::MemRegionVal* MV = dyn_cast<loc::MemRegionVal>(&ElementV))
2149 if (const TypedRegion* R = dyn_cast<TypedRegion>(MV->getRegion())) {
2150 // FIXME: The proper thing to do is to really iterate over the
2151 // container. We will do this with dispatch logic to the store.
2152 // For now, just 'conjure' up a symbolic value.
2153 QualType T = R->getValueType();
2154 assert(Loc::IsLocType(T));
2155 unsigned Count = Builder->getCurrentBlockCount();
2156 SymbolRef Sym = SymMgr.getConjuredSymbol(elem, T, Count);
2157 SVal V = svalBuilder.makeLoc(Sym);
2158 hasElems = hasElems->bindLoc(ElementV, V);
2160 // Bind the location to 'nil' on the false branch.
2161 SVal nilV = svalBuilder.makeIntVal(0, T);
2162 noElems = noElems->bindLoc(ElementV, nilV);
2165 // Create the new nodes.
2166 MakeNode(Dst, S, Pred, hasElems);
2167 MakeNode(Dst, S, Pred, noElems);
2171 //===----------------------------------------------------------------------===//
2172 // Transfer function: Objective-C message expressions.
2173 //===----------------------------------------------------------------------===//
2175 namespace {
2176 class ObjCMsgWLItem {
2177 public:
2178 ObjCMessageExpr::const_arg_iterator I;
2179 ExplodedNode *N;
2181 ObjCMsgWLItem(const ObjCMessageExpr::const_arg_iterator &i, ExplodedNode *n)
2182 : I(i), N(n) {}
2184 } // end anonymous namespace
2186 void ExprEngine::VisitObjCMessageExpr(const ObjCMessageExpr* ME,
2187 ExplodedNode* Pred,
2188 ExplodedNodeSet& Dst){
2190 // Create a worklist to process the arguments.
2191 llvm::SmallVector<ObjCMsgWLItem, 20> WL;
2193 // But first evaluate the receiver (if any).
2194 ObjCMessageExpr::const_arg_iterator AI = ME->arg_begin(), AE = ME->arg_end();
2195 if (const Expr *Receiver = ME->getInstanceReceiver()) {
2196 ExplodedNodeSet Tmp;
2197 Visit(Receiver, Pred, Tmp);
2199 if (Tmp.empty())
2200 return;
2202 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I)
2203 WL.push_back(ObjCMsgWLItem(AI, *I));
2205 else
2206 WL.push_back(ObjCMsgWLItem(AI, Pred));
2208 // Evaluate the arguments.
2209 ExplodedNodeSet ArgsEvaluated;
2210 while (!WL.empty()) {
2211 ObjCMsgWLItem Item = WL.back();
2212 WL.pop_back();
2214 if (Item.I == AE) {
2215 ArgsEvaluated.insert(Item.N);
2216 continue;
2219 // Evaluate the subexpression.
2220 ExplodedNodeSet Tmp;
2222 // FIXME: [Objective-C++] handle arguments that are references
2223 Visit(*Item.I, Item.N, Tmp);
2225 // Enqueue evaluating the next argument on the worklist.
2226 ++(Item.I);
2227 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI)
2228 WL.push_back(ObjCMsgWLItem(Item.I, *NI));
2231 // Now that the arguments are processed, handle the previsit checks.
2232 ExplodedNodeSet DstPrevisit;
2233 CheckerVisit(ME, DstPrevisit, ArgsEvaluated, PreVisitStmtCallback);
2235 // Proceed with evaluating the message expression.
2236 ExplodedNodeSet dstEval;
2238 for (ExplodedNodeSet::iterator DI = DstPrevisit.begin(),
2239 DE = DstPrevisit.end(); DI != DE; ++DI) {
2241 Pred = *DI;
2242 bool RaisesException = false;
2243 unsigned oldSize = dstEval.size();
2244 SaveAndRestore<bool> OldSink(Builder->BuildSinks);
2245 SaveOr OldHasGen(Builder->HasGeneratedNode);
2247 if (const Expr *Receiver = ME->getInstanceReceiver()) {
2248 const GRState *state = GetState(Pred);
2250 // Bifurcate the state into nil and non-nil ones.
2251 DefinedOrUnknownSVal receiverVal =
2252 cast<DefinedOrUnknownSVal>(state->getSVal(Receiver));
2254 const GRState *notNilState, *nilState;
2255 llvm::tie(notNilState, nilState) = state->assume(receiverVal);
2257 // There are three cases: the receiver may be nil or non-nil, must be nil, or
2258 // must be non-nil. We handle must-be-nil here and merge the other two into non-nil.
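// Illustrative example: for '[obj length]' with an unconstrained 'obj', both
// notNilState and nilState are feasible and the message is evaluated on the
// non-nil path; if 'obj' is already known to be nil, only nilState survives
// and the nil-receiver handling below is taken.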
2259 if (nilState && !notNilState) {
2260 CheckerEvalNilReceiver(ME, dstEval, nilState, Pred);
2261 continue;
2264 // Check if the "raise" message was sent.
2265 assert(notNilState);
2266 if (ME->getSelector() == RaiseSel)
2267 RaisesException = true;
2269 // Check if we raise an exception. For now treat these as sinks.
2270 // Eventually we will want to handle exceptions properly.
2271 if (RaisesException)
2272 Builder->BuildSinks = true;
2274 // Dispatch to plug-in transfer function.
2275 evalObjCMessageExpr(dstEval, ME, Pred, notNilState);
2277 else if (ObjCInterfaceDecl *Iface = ME->getReceiverInterface()) {
2278 IdentifierInfo* ClsName = Iface->getIdentifier();
2279 Selector S = ME->getSelector();
2281 // Check for special instance methods.
2282 if (!NSExceptionII) {
2283 ASTContext& Ctx = getContext();
2284 NSExceptionII = &Ctx.Idents.get("NSException");
2287 if (ClsName == NSExceptionII) {
2288 enum { NUM_RAISE_SELECTORS = 2 };
2290 // Lazily create a cache of the selectors.
2291 if (!NSExceptionInstanceRaiseSelectors) {
2292 ASTContext& Ctx = getContext();
2293 NSExceptionInstanceRaiseSelectors =
2294 new Selector[NUM_RAISE_SELECTORS];
2295 llvm::SmallVector<IdentifierInfo*, NUM_RAISE_SELECTORS> II;
2296 unsigned idx = 0;
2298 // raise:format:
2299 II.push_back(&Ctx.Idents.get("raise"));
2300 II.push_back(&Ctx.Idents.get("format"));
2301 NSExceptionInstanceRaiseSelectors[idx++] =
2302 Ctx.Selectors.getSelector(II.size(), &II[0]);
2304 // raise:format:arguments:
2305 II.push_back(&Ctx.Idents.get("arguments"));
2306 NSExceptionInstanceRaiseSelectors[idx++] =
2307 Ctx.Selectors.getSelector(II.size(), &II[0]);
2310 for (unsigned i = 0; i < NUM_RAISE_SELECTORS; ++i)
2311 if (S == NSExceptionInstanceRaiseSelectors[i]) {
2312 RaisesException = true;
2313 break;
2317 // Check if we raise an exception. For now treat these as sinks.
2318 // Eventually we will want to handle exceptions properly.
2319 if (RaisesException)
2320 Builder->BuildSinks = true;
2322 // Dispatch to plug-in transfer function.
2323 evalObjCMessageExpr(dstEval, ME, Pred, Builder->GetState(Pred));
2326 // Handle the case where no nodes were generated. Auto-generate a node that
2327 // contains the updated state if we aren't generating sinks.
2328 if (!Builder->BuildSinks && dstEval.size() == oldSize &&
2329 !Builder->HasGeneratedNode)
2330 MakeNode(dstEval, ME, Pred, GetState(Pred));
2333 // Finally, perform the post-condition check of the ObjCMessageExpr and store
2334 // the created nodes in 'Dst'.
2335 CheckerVisit(ME, Dst, dstEval, PostVisitStmtCallback);
2338 //===----------------------------------------------------------------------===//
2339 // Transfer functions: Miscellaneous statements.
2340 //===----------------------------------------------------------------------===//
2342 void ExprEngine::VisitCast(const CastExpr *CastE, const Expr *Ex,
2343 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
2345 ExplodedNodeSet S1;
2346 Visit(Ex, Pred, S1);
2347 ExplodedNodeSet S2;
2348 CheckerVisit(CastE, S2, S1, PreVisitStmtCallback);
2350 if (CastE->getCastKind() == CK_LValueToRValue) {
2351 for (ExplodedNodeSet::iterator I = S2.begin(), E = S2.end(); I!=E; ++I) {
2352 ExplodedNode *subExprNode = *I;
2353 const GRState *state = GetState(subExprNode);
2354 evalLoad(Dst, CastE, subExprNode, state, state->getSVal(Ex));
2356 return;
2359 // All other casts.
2360 QualType T = CastE->getType();
2361 QualType ExTy = Ex->getType();
2363 if (const ExplicitCastExpr *ExCast=dyn_cast_or_null<ExplicitCastExpr>(CastE))
2364 T = ExCast->getTypeAsWritten();
2366 #if 0
2367 // If we are evaluating the cast in an lvalue context, we implicitly want
2368 // the cast to evaluate to a location.
2369 if (asLValue) {
2370 ASTContext &Ctx = getContext();
2371 T = Ctx.getPointerType(Ctx.getCanonicalType(T));
2372 ExTy = Ctx.getPointerType(Ctx.getCanonicalType(ExTy));
2374 #endif
2376 switch (CastE->getCastKind()) {
2377 case CK_ToVoid:
2378 for (ExplodedNodeSet::iterator I = S2.begin(), E = S2.end(); I != E; ++I)
2379 Dst.Add(*I);
2380 return;
2382 case CK_LValueToRValue:
2383 case CK_NoOp:
2384 case CK_FunctionToPointerDecay:
2385 for (ExplodedNodeSet::iterator I = S2.begin(), E = S2.end(); I != E; ++I) {
2386 // Copy the SVal of Ex to CastE.
2387 ExplodedNode *N = *I;
2388 const GRState *state = GetState(N);
2389 SVal V = state->getSVal(Ex);
2390 state = state->BindExpr(CastE, V);
2391 MakeNode(Dst, CastE, N, state);
2393 return;
2395 case CK_GetObjCProperty:
2396 case CK_Dependent:
2397 case CK_ArrayToPointerDecay:
2398 case CK_BitCast:
2399 case CK_LValueBitCast:
2400 case CK_IntegralCast:
2401 case CK_NullToPointer:
2402 case CK_IntegralToPointer:
2403 case CK_PointerToIntegral:
2404 case CK_PointerToBoolean:
2405 case CK_IntegralToBoolean:
2406 case CK_IntegralToFloating:
2407 case CK_FloatingToIntegral:
2408 case CK_FloatingToBoolean:
2409 case CK_FloatingCast:
2410 case CK_FloatingRealToComplex:
2411 case CK_FloatingComplexToReal:
2412 case CK_FloatingComplexToBoolean:
2413 case CK_FloatingComplexCast:
2414 case CK_FloatingComplexToIntegralComplex:
2415 case CK_IntegralRealToComplex:
2416 case CK_IntegralComplexToReal:
2417 case CK_IntegralComplexToBoolean:
2418 case CK_IntegralComplexCast:
2419 case CK_IntegralComplexToFloatingComplex:
2420 case CK_AnyPointerToObjCPointerCast:
2421 case CK_AnyPointerToBlockPointerCast:
2423 case CK_ObjCObjectLValueCast: {
2424 // Delegate to SValBuilder to process.
2425 for (ExplodedNodeSet::iterator I = S2.begin(), E = S2.end(); I != E; ++I) {
2426 ExplodedNode* N = *I;
2427 const GRState* state = GetState(N);
2428 SVal V = state->getSVal(Ex);
2429 V = svalBuilder.evalCast(V, T, ExTy);
2430 state = state->BindExpr(CastE, V);
2431 MakeNode(Dst, CastE, N, state);
2433 return;
2436 case CK_DerivedToBase:
2437 case CK_UncheckedDerivedToBase:
2438 // For DerivedToBase cast, delegate to the store manager.
2439 for (ExplodedNodeSet::iterator I = S2.begin(), E = S2.end(); I != E; ++I) {
2440 ExplodedNode *node = *I;
2441 const GRState *state = GetState(node);
2442 SVal val = state->getSVal(Ex);
2443 val = getStoreManager().evalDerivedToBase(val, T);
2444 state = state->BindExpr(CastE, val);
2445 MakeNode(Dst, CastE, node, state);
2447 return;
2449 // Various C++ casts that are not handled yet.
2450 case CK_Dynamic:
2451 case CK_ToUnion:
2452 case CK_BaseToDerived:
2453 case CK_NullToMemberPointer:
2454 case CK_BaseToDerivedMemberPointer:
2455 case CK_DerivedToBaseMemberPointer:
2456 case CK_UserDefinedConversion:
2457 case CK_ConstructorConversion:
2458 case CK_VectorSplat:
2459 case CK_MemberPointerToBoolean: {
2460 SaveAndRestore<bool> OldSink(Builder->BuildSinks);
2461 Builder->BuildSinks = true;
2462 MakeNode(Dst, CastE, Pred, GetState(Pred));
2463 return;
2468 void ExprEngine::VisitCompoundLiteralExpr(const CompoundLiteralExpr* CL,
2469 ExplodedNode* Pred,
2470 ExplodedNodeSet& Dst) {
2471 const InitListExpr* ILE
2472 = cast<InitListExpr>(CL->getInitializer()->IgnoreParens());
2473 ExplodedNodeSet Tmp;
2474 Visit(ILE, Pred, Tmp);
2476 for (ExplodedNodeSet::iterator I = Tmp.begin(), EI = Tmp.end(); I!=EI; ++I) {
2477 const GRState* state = GetState(*I);
2478 SVal ILV = state->getSVal(ILE);
2479 const LocationContext *LC = (*I)->getLocationContext();
2480 state = state->bindCompoundLiteral(CL, LC, ILV);
2482 if (CL->isLValue()) {
2483 MakeNode(Dst, CL, *I, state->BindExpr(CL, state->getLValue(CL, LC)));
2485 else
2486 MakeNode(Dst, CL, *I, state->BindExpr(CL, ILV));
2490 void ExprEngine::VisitDeclStmt(const DeclStmt *DS, ExplodedNode *Pred,
2491 ExplodedNodeSet& Dst) {
2493 // The CFG has one DeclStmt per Decl.
2494 const Decl* D = *DS->decl_begin();
2496 if (!D || !isa<VarDecl>(D))
2497 return;
2499 const VarDecl* VD = dyn_cast<VarDecl>(D);
2500 const Expr* InitEx = VD->getInit();
2502 // FIXME: static variables may have an initializer, but the second
2503 // time a function is called those values may not be current.
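// Illustrative example of the issue: in 'void f() { static int n = 0; ++n; }'
// the initializer runs only on the first call, so re-binding 'n' to 0 on every
// simulated call would be inaccurate.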
2504 ExplodedNodeSet Tmp;
2506 if (InitEx) {
2507 if (VD->getType()->isReferenceType() && !InitEx->isLValue()) {
2508 // If the initializer is a C++ record type, it should already have a
2509 // temporary object.
2510 if (!InitEx->getType()->isRecordType())
2511 CreateCXXTemporaryObject(InitEx, Pred, Tmp);
2512 else
2513 Tmp.Add(Pred);
2514 } else
2515 Visit(InitEx, Pred, Tmp);
2516 } else
2517 Tmp.Add(Pred);
2519 ExplodedNodeSet Tmp2;
2520 CheckerVisit(DS, Tmp2, Tmp, PreVisitStmtCallback);
2522 for (ExplodedNodeSet::iterator I=Tmp2.begin(), E=Tmp2.end(); I!=E; ++I) {
2523 ExplodedNode *N = *I;
2524 const GRState *state = GetState(N);
2526 // Decls without InitExpr are not initialized explicitly.
2527 const LocationContext *LC = N->getLocationContext();
2529 if (InitEx) {
2530 SVal InitVal = state->getSVal(InitEx);
2532 // We bound the temp obj region to the CXXConstructExpr. Now recover
2533 // the lazy compound value when the variable is not a reference.
2534 if (AMgr.getLangOptions().CPlusPlus && VD->getType()->isRecordType() &&
2535 !VD->getType()->isReferenceType() && isa<loc::MemRegionVal>(InitVal)){
2536 InitVal = state->getSVal(cast<loc::MemRegionVal>(InitVal).getRegion());
2537 assert(isa<nonloc::LazyCompoundVal>(InitVal));
2540 // Recover some path-sensitivity if a scalar value evaluated to
2541 // UnknownVal.
2542 if ((InitVal.isUnknown() ||
2543 !getConstraintManager().canReasonAbout(InitVal)) &&
2544 !VD->getType()->isReferenceType()) {
2545 InitVal = svalBuilder.getConjuredSymbolVal(NULL, InitEx,
2546 Builder->getCurrentBlockCount());
2549 evalBind(Dst, DS, *I, state,
2550 loc::MemRegionVal(state->getRegion(VD, LC)), InitVal, true);
2552 else {
2553 state = state->bindDeclWithNoInit(state->getRegion(VD, LC));
2554 MakeNode(Dst, DS, *I, state);
2559 void ExprEngine::VisitCondInit(const VarDecl *VD, const Stmt *S,
2560 ExplodedNode *Pred, ExplodedNodeSet& Dst) {
2562 const Expr* InitEx = VD->getInit();
2563 ExplodedNodeSet Tmp;
2564 Visit(InitEx, Pred, Tmp);
2566 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
2567 ExplodedNode *N = *I;
2568 const GRState *state = GetState(N);
2570 const LocationContext *LC = N->getLocationContext();
2571 SVal InitVal = state->getSVal(InitEx);
2573 // Recover some path-sensitivity if a scalar value evaluated to
2574 // UnknownVal.
2575 if (InitVal.isUnknown() ||
2576 !getConstraintManager().canReasonAbout(InitVal)) {
2577 InitVal = svalBuilder.getConjuredSymbolVal(NULL, InitEx,
2578 Builder->getCurrentBlockCount());
2581 evalBind(Dst, S, N, state,
2582 loc::MemRegionVal(state->getRegion(VD, LC)), InitVal, true);
2586 namespace {
2587 // This class is used by VisitInitListExpr as an item in a worklist
2588 // for processing the values contained in an InitListExpr.
2589 class InitListWLItem {
2590 public:
2591 llvm::ImmutableList<SVal> Vals;
2592 ExplodedNode* N;
2593 InitListExpr::const_reverse_iterator Itr;
2595 InitListWLItem(ExplodedNode* n, llvm::ImmutableList<SVal> vals,
2596 InitListExpr::const_reverse_iterator itr)
2597 : Vals(vals), N(n), Itr(itr) {}
2602 void ExprEngine::VisitInitListExpr(const InitListExpr* E, ExplodedNode* Pred,
2603 ExplodedNodeSet& Dst) {
2605 const GRState* state = GetState(Pred);
2606 QualType T = getContext().getCanonicalType(E->getType());
2607 unsigned NumInitElements = E->getNumInits();
2609 if (T->isArrayType() || T->isRecordType() || T->isVectorType()) {
2610 llvm::ImmutableList<SVal> StartVals = getBasicVals().getEmptySValList();
2612 // Handle base case where the initializer has no elements.
2613 // e.g: static int* myArray[] = {};
2614 if (NumInitElements == 0) {
2615 SVal V = svalBuilder.makeCompoundVal(T, StartVals);
2616 MakeNode(Dst, E, Pred, state->BindExpr(E, V));
2617 return;
2620 // Create a worklist to process the initializers.
2621 llvm::SmallVector<InitListWLItem, 10> WorkList;
2622 WorkList.reserve(NumInitElements);
2623 WorkList.push_back(InitListWLItem(Pred, StartVals, E->rbegin()));
2624 InitListExpr::const_reverse_iterator ItrEnd = E->rend();
2625 assert(!(E->rbegin() == E->rend()));
2627 // Process the worklist until it is empty.
2628 while (!WorkList.empty()) {
2629 InitListWLItem X = WorkList.back();
2630 WorkList.pop_back();
2632 ExplodedNodeSet Tmp;
2633 Visit(*X.Itr, X.N, Tmp);
2635 InitListExpr::const_reverse_iterator NewItr = X.Itr + 1;
2637 for (ExplodedNodeSet::iterator NI=Tmp.begin(),NE=Tmp.end();NI!=NE;++NI) {
2638 // Get the last initializer value.
2639 state = GetState(*NI);
2640 SVal InitV = state->getSVal(cast<Expr>(*X.Itr));
2642 // Construct the new list of values by prepending the new value to
2643 // the already constructed list.
2644 llvm::ImmutableList<SVal> NewVals =
2645 getBasicVals().consVals(InitV, X.Vals);
2647 if (NewItr == ItrEnd) {
2648 // Now we have a list holding all init values. Make CompoundValData.
2649 SVal V = svalBuilder.makeCompoundVal(T, NewVals);
2651 // Make final state and node.
2652 MakeNode(Dst, E, *NI, state->BindExpr(E, V));
2654 else {
2655 // Still some initializer values to go. Push them onto the worklist.
2656 WorkList.push_back(InitListWLItem(*NI, NewVals, NewItr));
2661 return;
2664 if (Loc::IsLocType(T) || T->isIntegerType()) {
2665 assert (E->getNumInits() == 1);
2666 ExplodedNodeSet Tmp;
2667 const Expr* Init = E->getInit(0);
2668 Visit(Init, Pred, Tmp);
2669 for (ExplodedNodeSet::iterator I=Tmp.begin(), EI=Tmp.end(); I != EI; ++I) {
2670 state = GetState(*I);
2671 MakeNode(Dst, E, *I, state->BindExpr(E, state->getSVal(Init)));
2673 return;
2676 assert(0 && "unprocessed InitListExpr type");
2679 /// VisitSizeOfAlignOfExpr - Transfer function for sizeof and alignof expressions.
2680 void ExprEngine::VisitSizeOfAlignOfExpr(const SizeOfAlignOfExpr* Ex,
2681 ExplodedNode* Pred,
2682 ExplodedNodeSet& Dst) {
2683 QualType T = Ex->getTypeOfArgument();
2684 CharUnits amt;
2686 if (Ex->isSizeOf()) {
2687 if (T == getContext().VoidTy) {
2688 // sizeof(void) == 1 byte.
2689 amt = CharUnits::One();
2691 else if (!T->isConstantSizeType()) {
2692 assert(T->isVariableArrayType() && "Unknown non-constant-sized type.");
2694 // FIXME: Add support for VLA type arguments, not just VLA expressions.
2695 // When that happens, we should probably refactor VLASizeChecker's code.
2696 if (Ex->isArgumentType()) {
2697 Dst.Add(Pred);
2698 return;
2701 // Get the size by getting the extent of the sub-expression.
2702 // First, visit the sub-expression to find its region.
2703 const Expr *Arg = Ex->getArgumentExpr();
2704 ExplodedNodeSet Tmp;
2705 Visit(Arg, Pred, Tmp);
2707 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
2708 const GRState* state = GetState(*I);
2709 const MemRegion *MR = state->getSVal(Arg).getAsRegion();
2711 // If the subexpression can't be resolved to a region, we don't know
2712 // anything about its size. Just leave the state as is and continue.
2713 if (!MR) {
2714 Dst.Add(*I);
2715 continue;
2718 // The result is the extent of the VLA.
2719 SVal Extent = cast<SubRegion>(MR)->getExtent(svalBuilder);
2720 MakeNode(Dst, Ex, *I, state->BindExpr(Ex, Extent));
2723 return;
2725 else if (T->getAs<ObjCObjectType>()) {
2726 // Some code tries to take the sizeof an ObjCObjectType, relying on the
2727 // compiler having laid out its representation. Just report Unknown
2728 // for these.
2729 Dst.Add(Pred);
2730 return;
2732 else {
2733 // All other cases.
2734 amt = getContext().getTypeSizeInChars(T);
2737 else // Get alignment of the type.
2738 amt = getContext().getTypeAlignInChars(T);
2740 MakeNode(Dst, Ex, Pred,
2741 GetState(Pred)->BindExpr(Ex,
2742 svalBuilder.makeIntVal(amt.getQuantity(), Ex->getType())));
2745 void ExprEngine::VisitOffsetOfExpr(const OffsetOfExpr* OOE,
2746 ExplodedNode* Pred, ExplodedNodeSet& Dst) {
2747 Expr::EvalResult Res;
2748 if (OOE->Evaluate(Res, getContext()) && Res.Val.isInt()) {
2749 const APSInt &IV = Res.Val.getInt();
2750 assert(IV.getBitWidth() == getContext().getTypeSize(OOE->getType()));
2751 assert(OOE->getType()->isIntegerType());
2752 assert(IV.isSigned() == OOE->getType()->isSignedIntegerType());
2753 SVal X = svalBuilder.makeIntVal(IV);
2754 MakeNode(Dst, OOE, Pred, GetState(Pred)->BindExpr(OOE, X));
2755 return;
2757 // FIXME: Handle the case where __builtin_offsetof is not a constant.
2758 Dst.Add(Pred);
2761 void ExprEngine::VisitUnaryOperator(const UnaryOperator* U,
2762 ExplodedNode* Pred,
2763 ExplodedNodeSet& Dst) {
2765 switch (U->getOpcode()) {
2767 default:
2768 break;
2770 case UO_Real: {
2771 const Expr* Ex = U->getSubExpr()->IgnoreParens();
2772 ExplodedNodeSet Tmp;
2773 Visit(Ex, Pred, Tmp);
2775 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
2777 // FIXME: We don't have complex SValues yet.
2778 if (Ex->getType()->isAnyComplexType()) {
2779 // Just report "Unknown."
2780 Dst.Add(*I);
2781 continue;
2784 // For all other types, UO_Real is an identity operation.
2785 assert (U->getType() == Ex->getType());
2786 const GRState* state = GetState(*I);
2787 MakeNode(Dst, U, *I, state->BindExpr(U, state->getSVal(Ex)));
2790 return;
2793 case UO_Imag: {
2795 const Expr* Ex = U->getSubExpr()->IgnoreParens();
2796 ExplodedNodeSet Tmp;
2797 Visit(Ex, Pred, Tmp);
2799 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
2800 // FIXME: We don't have complex SValues yet.
2801 if (Ex->getType()->isAnyComplexType()) {
2802 // Just report "Unknown."
2803 Dst.Add(*I);
2804 continue;
2807 // For all other types, UO_Imag returns 0.
2808 const GRState* state = GetState(*I);
2809 SVal X = svalBuilder.makeZeroVal(Ex->getType());
2810 MakeNode(Dst, U, *I, state->BindExpr(U, X));
2813 return;
2816 case UO_Plus:
2817 assert(!U->isLValue());
2818 // FALL-THROUGH.
2819 case UO_Deref:
2820 case UO_AddrOf:
2821 case UO_Extension: {
2823 // Unary "+" is a no-op, similar to parentheses. We still have places
2824 // where it may be a block-level expression, so we need to
2825 // generate an extra node that just propagates the value of the
2826 // subexpression.
2828 const Expr* Ex = U->getSubExpr()->IgnoreParens();
2829 ExplodedNodeSet Tmp;
2830 Visit(Ex, Pred, Tmp);
2832 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
2833 const GRState* state = GetState(*I);
2834 MakeNode(Dst, U, *I, state->BindExpr(U, state->getSVal(Ex)));
2837 return;
2840 case UO_LNot:
2841 case UO_Minus:
2842 case UO_Not: {
2843 assert (!U->isLValue());
2844 const Expr* Ex = U->getSubExpr()->IgnoreParens();
2845 ExplodedNodeSet Tmp;
2846 Visit(Ex, Pred, Tmp);
2848 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
2849 const GRState* state = GetState(*I);
2851 // Get the value of the subexpression.
2852 SVal V = state->getSVal(Ex);
2854 if (V.isUnknownOrUndef()) {
2855 MakeNode(Dst, U, *I, state->BindExpr(U, V));
2856 continue;
2859 // QualType DstT = getContext().getCanonicalType(U->getType());
2860 // QualType SrcT = getContext().getCanonicalType(Ex->getType());
2862 // if (DstT != SrcT) // Perform promotions.
2863 // V = evalCast(V, DstT);
2865 // if (V.isUnknownOrUndef()) {
2866 // MakeNode(Dst, U, *I, BindExpr(St, U, V));
2867 // continue;
2868 // }
2870 switch (U->getOpcode()) {
2871 default:
2872 assert(false && "Invalid Opcode.");
2873 break;
2875 case UO_Not:
2876 // FIXME: Do we need to handle promotions?
2877 state = state->BindExpr(U, evalComplement(cast<NonLoc>(V)));
2878 break;
2880 case UO_Minus:
2881 // FIXME: Do we need to handle promotions?
2882 state = state->BindExpr(U, evalMinus(cast<NonLoc>(V)));
2883 break;
2885 case UO_LNot:
2887 // C99 6.5.3.3: "The expression !E is equivalent to (0==E)."
2889 // Note: technically we do "E == 0", but this is the same in the
2890 // transfer functions as "0 == E".
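// Illustrative example: '!p' for a pointer 'p' is evaluated as 'p == (null)',
// while '!n' for an integer 'n' is evaluated as 'n == 0'.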
2891 SVal Result;
2893 if (isa<Loc>(V)) {
2894 Loc X = svalBuilder.makeNull();
2895 Result = evalBinOp(state, BO_EQ, cast<Loc>(V), X,
2896 U->getType());
2898 else {
2899 nonloc::ConcreteInt X(getBasicVals().getValue(0, Ex->getType()));
2900 Result = evalBinOp(state, BO_EQ, cast<NonLoc>(V), X,
2901 U->getType());
2904 state = state->BindExpr(U, Result);
2906 break;
2909 MakeNode(Dst, U, *I, state);
2912 return;
2916 // Handle ++ and -- (both prefix and postfix forms).
2917 assert (U->isIncrementDecrementOp());
2918 ExplodedNodeSet Tmp;
2919 const Expr* Ex = U->getSubExpr()->IgnoreParens();
2920 Visit(Ex, Pred, Tmp);
2922 for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I!=E; ++I) {
2924 const GRState* state = GetState(*I);
2925 SVal loc = state->getSVal(Ex);
2927 // Perform a load.
2928 ExplodedNodeSet Tmp2;
2929 evalLoad(Tmp2, Ex, *I, state, loc);
2931 for (ExplodedNodeSet::iterator I2=Tmp2.begin(), E2=Tmp2.end();I2!=E2;++I2) {
2933 state = GetState(*I2);
2934 SVal V2_untested = state->getSVal(Ex);
2936 // Propagate unknown and undefined values.
2937 if (V2_untested.isUnknownOrUndef()) {
2938 MakeNode(Dst, U, *I2, state->BindExpr(U, V2_untested));
2939 continue;
2941 DefinedSVal V2 = cast<DefinedSVal>(V2_untested);
2943 // Handle all other values.
2944 BinaryOperator::Opcode Op = U->isIncrementOp() ? BO_Add
2945 : BO_Sub;
2947 // If the UnaryOperator has non-location type, use its type to create the
2948 // constant value. If the UnaryOperator has location type, create the
2949 // constant with int type and pointer width.
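// Illustrative example: for 'int *p; ++p;' the increment amount is a 1 of
// pointer width, while for 'int n; ++n;' it is an ordinary 'int' 1.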
2950 SVal RHS;
2952 if (U->getType()->isAnyPointerType())
2953 RHS = svalBuilder.makeIntValWithPtrWidth(1, false);
2954 else
2955 RHS = svalBuilder.makeIntVal(1, U->getType());
2957 SVal Result = evalBinOp(state, Op, V2, RHS, U->getType());
2959 // Conjure a new symbol if necessary to recover precision.
2960 if (Result.isUnknown() || !getConstraintManager().canReasonAbout(Result)){
2961 DefinedOrUnknownSVal SymVal =
2962 svalBuilder.getConjuredSymbolVal(NULL, Ex,
2963 Builder->getCurrentBlockCount());
2964 Result = SymVal;
2966 // If the value is a location, ++/-- should always preserve
2967 // non-nullness. Check if the original value was non-null, and if so
2968 // propagate that constraint.
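// Illustrative example: if 'p' is known to be non-null before 'p++', the
// conjured result symbol is likewise constrained to be non-null.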
2969 if (Loc::IsLocType(U->getType())) {
2970 DefinedOrUnknownSVal Constraint =
2971 svalBuilder.evalEQ(state, V2,svalBuilder.makeZeroVal(U->getType()));
2973 if (!state->assume(Constraint, true)) {
2974 // It isn't feasible for the original value to be null.
2975 // Propagate this constraint.
2976 Constraint = svalBuilder.evalEQ(state, SymVal,
2977 svalBuilder.makeZeroVal(U->getType()));
2980 state = state->assume(Constraint, false);
2981 assert(state);
2986 // Since the lvalue-to-rvalue conversion is explicit in the AST,
2987 // we bind an l-value if the operator is prefix and an lvalue (in C++).
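// Illustrative example: in C++ '++x' is an lvalue, so the expression is bound
// to the location of 'x', whereas 'x++' is bound to the old (pre-increment)
// value.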
2988 if (U->isPrefix() && U->isLValue())
2989 state = state->BindExpr(U, loc);
2990 else
2991 state = state->BindExpr(U, V2);
2993 // Perform the store.
2994 evalStore(Dst, NULL, U, *I2, state, loc, Result);
2999 void ExprEngine::VisitAsmStmt(const AsmStmt* A, ExplodedNode* Pred,
3000 ExplodedNodeSet& Dst) {
3001 VisitAsmStmtHelperOutputs(A, A->begin_outputs(), A->end_outputs(), Pred, Dst);
3004 void ExprEngine::VisitAsmStmtHelperOutputs(const AsmStmt* A,
3005 AsmStmt::const_outputs_iterator I,
3006 AsmStmt::const_outputs_iterator E,
3007 ExplodedNode* Pred, ExplodedNodeSet& Dst) {
3008 if (I == E) {
3009 VisitAsmStmtHelperInputs(A, A->begin_inputs(), A->end_inputs(), Pred, Dst);
3010 return;
3013 ExplodedNodeSet Tmp;
3014 Visit(*I, Pred, Tmp);
3015 ++I;
3017 for (ExplodedNodeSet::iterator NI = Tmp.begin(), NE = Tmp.end();NI != NE;++NI)
3018 VisitAsmStmtHelperOutputs(A, I, E, *NI, Dst);
3021 void ExprEngine::VisitAsmStmtHelperInputs(const AsmStmt* A,
3022 AsmStmt::const_inputs_iterator I,
3023 AsmStmt::const_inputs_iterator E,
3024 ExplodedNode* Pred,
3025 ExplodedNodeSet& Dst) {
3026 if (I == E) {
3028 // We have processed both the inputs and the outputs. All of the outputs
3029 // should evaluate to Locs. Nuke all of their values.
3031 // FIXME: Some day in the future it would be nice to allow a "plug-in"
3032 // which interprets the inline asm and stores proper results in the
3033 // outputs.
3035 const GRState* state = GetState(Pred);
3037 for (AsmStmt::const_outputs_iterator OI = A->begin_outputs(),
3038 OE = A->end_outputs(); OI != OE; ++OI) {
3040 SVal X = state->getSVal(*OI);
3041 assert (!isa<NonLoc>(X)); // Should be an Lval, unknown, or undef.
3043 if (isa<Loc>(X))
3044 state = state->bindLoc(cast<Loc>(X), UnknownVal());
3047 MakeNode(Dst, A, Pred, state);
3048 return;
3051 ExplodedNodeSet Tmp;
3052 Visit(*I, Pred, Tmp);
3054 ++I;
3056 for (ExplodedNodeSet::iterator NI = Tmp.begin(), NE = Tmp.end(); NI!=NE; ++NI)
3057 VisitAsmStmtHelperInputs(A, I, E, *NI, Dst);
3060 void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
3061 ExplodedNodeSet &Dst) {
3062 ExplodedNodeSet Src;
3063 if (const Expr *RetE = RS->getRetValue()) {
3064 // Record the returned expression in the state. It will be used in
3065 // ProcessCallExit to bind the return value to the call expr.
3067 static int Tag = 0;
3068 SaveAndRestore<const void *> OldTag(Builder->Tag, &Tag);
3069 const GRState *state = GetState(Pred);
3070 state = state->set<ReturnExpr>(RetE);
3071 Pred = Builder->generateNode(RetE, state, Pred);
3073 // We may get a NULL Pred because we generated a cached node.
3074 if (Pred)
3075 Visit(RetE, Pred, Src);
3077 else {
3078 Src.Add(Pred);
3081 ExplodedNodeSet CheckedSet;
3082 CheckerVisit(RS, CheckedSet, Src, PreVisitStmtCallback);
3084 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
3085 I != E; ++I) {
3087 assert(Builder && "StmtNodeBuilder must be defined.");
3089 Pred = *I;
3090 unsigned size = Dst.size();
3092 SaveAndRestore<bool> OldSink(Builder->BuildSinks);
3093 SaveOr OldHasGen(Builder->HasGeneratedNode);
3095 getTF().evalReturn(Dst, *this, *Builder, RS, Pred);
3097 // Handle the case where no nodes were generated.
3098 if (!Builder->BuildSinks && Dst.size() == size &&
3099 !Builder->HasGeneratedNode)
3100 MakeNode(Dst, RS, Pred, GetState(Pred));
3104 //===----------------------------------------------------------------------===//
3105 // Transfer functions: Binary operators.
3106 //===----------------------------------------------------------------------===//
3108 void ExprEngine::VisitBinaryOperator(const BinaryOperator* B,
3109 ExplodedNode* Pred,
3110 ExplodedNodeSet& Dst) {
3111 ExplodedNodeSet Tmp1;
3112 Expr* LHS = B->getLHS()->IgnoreParens();
3113 Expr* RHS = B->getRHS()->IgnoreParens();
3115 Visit(LHS, Pred, Tmp1);
3116 ExplodedNodeSet Tmp3;
3118 for (ExplodedNodeSet::iterator I1=Tmp1.begin(), E1=Tmp1.end(); I1!=E1; ++I1) {
3119 SVal LeftV = GetState(*I1)->getSVal(LHS);
3120 ExplodedNodeSet Tmp2;
3121 Visit(RHS, *I1, Tmp2);
3123 ExplodedNodeSet CheckedSet;
3124 CheckerVisit(B, CheckedSet, Tmp2, PreVisitStmtCallback);
3126 // With both the LHS and RHS evaluated, process the operation itself.
3128 for (ExplodedNodeSet::iterator I2=CheckedSet.begin(), E2=CheckedSet.end();
3129 I2 != E2; ++I2) {
3131 const GRState *state = GetState(*I2);
3132 SVal RightV = state->getSVal(RHS);
3134 BinaryOperator::Opcode Op = B->getOpcode();
3136 if (Op == BO_Assign) {
3137 // EXPERIMENTAL: "Conjured" symbols.
3138 // FIXME: Handle structs.
3139 QualType T = RHS->getType();
3141 if (RightV.isUnknown() ||!getConstraintManager().canReasonAbout(RightV))
3143 unsigned Count = Builder->getCurrentBlockCount();
3144 RightV = svalBuilder.getConjuredSymbolVal(NULL, B->getRHS(), Count);
3147 SVal ExprVal = B->isLValue() ? LeftV : RightV;
3149 // Simulate the effects of a "store": bind the value of the RHS
3150 // to the L-Value represented by the LHS.
3151 evalStore(Tmp3, B, LHS, *I2, state->BindExpr(B, ExprVal), LeftV,RightV);
3152 continue;
3155 if (!B->isAssignmentOp()) {
3156 // Process non-assignments except commas or short-circuited
3157 // logical expressions (LAnd and LOr).
3158 SVal Result = evalBinOp(state, Op, LeftV, RightV, B->getType());
3160 if (Result.isUnknown()) {
3161 MakeNode(Tmp3, B, *I2, state);
3162 continue;
3165 state = state->BindExpr(B, Result);
3167 MakeNode(Tmp3, B, *I2, state);
3168 continue;
3171 assert (B->isCompoundAssignmentOp());
3173 switch (Op) {
3174 default:
3175 assert(0 && "Invalid opcode for compound assignment.");
3176 case BO_MulAssign: Op = BO_Mul; break;
3177 case BO_DivAssign: Op = BO_Div; break;
3178 case BO_RemAssign: Op = BO_Rem; break;
3179 case BO_AddAssign: Op = BO_Add; break;
3180 case BO_SubAssign: Op = BO_Sub; break;
3181 case BO_ShlAssign: Op = BO_Shl; break;
3182 case BO_ShrAssign: Op = BO_Shr; break;
3183 case BO_AndAssign: Op = BO_And; break;
3184 case BO_XorAssign: Op = BO_Xor; break;
3185 case BO_OrAssign: Op = BO_Or; break;
3188 // Perform a load (the LHS). This performs the checks for
3189 // null dereferences, and so on.
3190 ExplodedNodeSet Tmp4;
3191 SVal location = state->getSVal(LHS);
3192 evalLoad(Tmp4, LHS, *I2, state, location);
3194 for (ExplodedNodeSet::iterator I4=Tmp4.begin(), E4=Tmp4.end(); I4!=E4;
3195 ++I4) {
3196 state = GetState(*I4);
3197 SVal V = state->getSVal(LHS);
3199 // Get the computation type.
3200 QualType CTy =
3201 cast<CompoundAssignOperator>(B)->getComputationResultType();
3202 CTy = getContext().getCanonicalType(CTy);
3204 QualType CLHSTy =
3205 cast<CompoundAssignOperator>(B)->getComputationLHSType();
3206 CLHSTy = getContext().getCanonicalType(CLHSTy);
3208 QualType LTy = getContext().getCanonicalType(LHS->getType());
3209 QualType RTy = getContext().getCanonicalType(RHS->getType());
3211 // Promote LHS.
3212 V = svalBuilder.evalCast(V, CLHSTy, LTy);
3214 // Compute the result of the operation.
3215 SVal Result = svalBuilder.evalCast(evalBinOp(state, Op, V, RightV, CTy),
3216 B->getType(), CTy);
3218 // EXPERIMENTAL: "Conjured" symbols.
3219 // FIXME: Handle structs.
3221 SVal LHSVal;
3223 if (Result.isUnknown() ||
3224 !getConstraintManager().canReasonAbout(Result)) {
3226 unsigned Count = Builder->getCurrentBlockCount();
3228 // The symbolic value is actually for the type of the left-hand side
3229 // expression, not the computation type, as this is the value the
3230 // LValue on the LHS will bind to.
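// Illustrative example: for 'char c; ...; c += 1;' the computation type is
// 'int', but the conjured symbol is given type 'char' since that is what the
// lvalue 'c' will bind to; it is then converted to the computation type below.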
3231 LHSVal = svalBuilder.getConjuredSymbolVal(NULL, B->getRHS(), LTy, Count);
3233 // However, we need to convert the symbol to the computation type.
3234 Result = svalBuilder.evalCast(LHSVal, CTy, LTy);
3236 else {
3237 // The left-hand side may bind to a different value than the
3238 // computation type.
3239 LHSVal = svalBuilder.evalCast(Result, LTy, CTy);
3242 evalStore(Tmp3, B, LHS, *I4, state->BindExpr(B, Result),
3243 location, LHSVal);
3248 CheckerVisit(B, Dst, Tmp3, PostVisitStmtCallback);
3251 //===----------------------------------------------------------------------===//
3252 // Checker registration/lookup.
3253 //===----------------------------------------------------------------------===//
3255 Checker *ExprEngine::lookupChecker(void *tag) const {
3256 CheckerMap::const_iterator I = CheckerM.find(tag);
3257 return (I == CheckerM.end()) ? NULL : Checkers[I->second].second;
3260 //===----------------------------------------------------------------------===//
3261 // Visualization.
3262 //===----------------------------------------------------------------------===//
3264 #ifndef NDEBUG
3265 static ExprEngine* GraphPrintCheckerState;
3266 static SourceManager* GraphPrintSourceManager;
3268 namespace llvm {
3269 template<>
3270 struct DOTGraphTraits<ExplodedNode*> :
3271 public DefaultDOTGraphTraits {
3273 DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {}
3275 // FIXME: Since we do not cache error nodes in ExprEngine now, this does not
3276 // work.
3277 static std::string getNodeAttributes(const ExplodedNode* N, void*) {
3279 #if 0
3280 // FIXME: Replace with a general scheme to tell if the node is
3281 // an error node.
3282 if (GraphPrintCheckerState->isImplicitNullDeref(N) ||
3283 GraphPrintCheckerState->isExplicitNullDeref(N) ||
3284 GraphPrintCheckerState->isUndefDeref(N) ||
3285 GraphPrintCheckerState->isUndefStore(N) ||
3286 GraphPrintCheckerState->isUndefControlFlow(N) ||
3287 GraphPrintCheckerState->isUndefResult(N) ||
3288 GraphPrintCheckerState->isBadCall(N) ||
3289 GraphPrintCheckerState->isUndefArg(N))
3290 return "color=\"red\",style=\"filled\"";
3292 if (GraphPrintCheckerState->isNoReturnCall(N))
3293 return "color=\"blue\",style=\"filled\"";
3294 #endif
3295 return "";
3298 static std::string getNodeLabel(const ExplodedNode* N, void*){
3300 std::string sbuf;
3301 llvm::raw_string_ostream Out(sbuf);
3303 // Program Location.
3304 ProgramPoint Loc = N->getLocation();
3306 switch (Loc.getKind()) {
3307 case ProgramPoint::BlockEntranceKind:
3308 Out << "Block Entrance: B"
3309 << cast<BlockEntrance>(Loc).getBlock()->getBlockID();
3310 break;
3312 case ProgramPoint::BlockExitKind:
3313 assert (false);
3314 break;
3316 case ProgramPoint::CallEnterKind:
3317 Out << "CallEnter";
3318 break;
3320 case ProgramPoint::CallExitKind:
3321 Out << "CallExit";
3322 break;
3324 default: {
3325 if (StmtPoint *L = dyn_cast<StmtPoint>(&Loc)) {
3326 const Stmt* S = L->getStmt();
3327 SourceLocation SLoc = S->getLocStart();
3329 Out << S->getStmtClassName() << ' ' << (void*) S << ' ';
3330 LangOptions LO; // FIXME.
3331 S->printPretty(Out, 0, PrintingPolicy(LO));
3333 if (SLoc.isFileID()) {
3334 Out << "\\lline="
3335 << GraphPrintSourceManager->getInstantiationLineNumber(SLoc)
3336 << " col="
3337 << GraphPrintSourceManager->getInstantiationColumnNumber(SLoc)
3338 << "\\l";
3341 if (isa<PreStmt>(Loc))
3342 Out << "\\lPreStmt\\l;";
3343 else if (isa<PostLoad>(Loc))
3344 Out << "\\lPostLoad\\l;";
3345 else if (isa<PostStore>(Loc))
3346 Out << "\\lPostStore\\l";
3347 else if (isa<PostLValue>(Loc))
3348 Out << "\\lPostLValue\\l";
3350 #if 0
3351 // FIXME: Replace with a general scheme to determine
3352 // the name of the check.
3353 if (GraphPrintCheckerState->isImplicitNullDeref(N))
3354 Out << "\\|Implicit-Null Dereference.\\l";
3355 else if (GraphPrintCheckerState->isExplicitNullDeref(N))
3356 Out << "\\|Explicit-Null Dereference.\\l";
3357 else if (GraphPrintCheckerState->isUndefDeref(N))
3358 Out << "\\|Dereference of undefined value.\\l";
3359 else if (GraphPrintCheckerState->isUndefStore(N))
3360 Out << "\\|Store to Undefined Loc.";
3361 else if (GraphPrintCheckerState->isUndefResult(N))
3362 Out << "\\|Result of operation is undefined.";
3363 else if (GraphPrintCheckerState->isNoReturnCall(N))
3364 Out << "\\|Call to function marked \"noreturn\".";
3365 else if (GraphPrintCheckerState->isBadCall(N))
3366 Out << "\\|Call to NULL/Undefined.";
3367 else if (GraphPrintCheckerState->isUndefArg(N))
3368 Out << "\\|Argument in call is undefined";
3369 #endif
3371 break;
3374 const BlockEdge& E = cast<BlockEdge>(Loc);
3375 Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B"
3376 << E.getDst()->getBlockID() << ')';
3378 if (const Stmt* T = E.getSrc()->getTerminator()) {
3380 SourceLocation SLoc = T->getLocStart();
3382 Out << "\\|Terminator: ";
3383 LangOptions LO; // FIXME.
3384 E.getSrc()->printTerminator(Out, LO);
3386 if (SLoc.isFileID()) {
3387 Out << "\\lline="
3388 << GraphPrintSourceManager->getInstantiationLineNumber(SLoc)
3389 << " col="
3390 << GraphPrintSourceManager->getInstantiationColumnNumber(SLoc);
3393 if (isa<SwitchStmt>(T)) {
3394 const Stmt* Label = E.getDst()->getLabel();
3396 if (Label) {
3397 if (const CaseStmt* C = dyn_cast<CaseStmt>(Label)) {
3398 Out << "\\lcase ";
3399 LangOptions LO; // FIXME.
3400 C->getLHS()->printPretty(Out, 0, PrintingPolicy(LO));
3402 if (const Stmt* RHS = C->getRHS()) {
3403 Out << " .. ";
3404 RHS->printPretty(Out, 0, PrintingPolicy(LO));
3407 Out << ":";
3409 else {
3410 assert (isa<DefaultStmt>(Label));
3411 Out << "\\ldefault:";
3414 else
3415 Out << "\\l(implicit) default:";
3417 else if (isa<IndirectGotoStmt>(T)) {
3418 // FIXME
3420 else {
3421 Out << "\\lCondition: ";
3422 if (*E.getSrc()->succ_begin() == E.getDst())
3423 Out << "true";
3424 else
3425 Out << "false";
3428 Out << "\\l";
3431 #if 0
3432 // FIXME: Replace with a general scheme to determine
3433 // the name of the check.
3434 if (GraphPrintCheckerState->isUndefControlFlow(N)) {
3435 Out << "\\|Control-flow based on\\lUndefined value.\\l";
3437 #endif
3441 const GRState *state = N->getState();
3442 Out << "\\|StateID: " << (void*) state
3443 << " NodeID: " << (void*) N << "\\|";
3444 state->printDOT(Out, *N->getLocationContext()->getCFG());
3445 Out << "\\l";
3446 return Out.str();
3449 } // end llvm namespace
3450 #endif
3452 #ifndef NDEBUG
3453 template <typename ITERATOR>
3454 ExplodedNode* GetGraphNode(ITERATOR I) { return *I; }
3456 template <> ExplodedNode*
3457 GetGraphNode<llvm::DenseMap<ExplodedNode*, Expr*>::iterator>
3458 (llvm::DenseMap<ExplodedNode*, Expr*>::iterator I) {
3459 return I->first;
3461 #endif
3463 void ExprEngine::ViewGraph(bool trim) {
3464 #ifndef NDEBUG
3465 if (trim) {
3466 std::vector<ExplodedNode*> Src;
3468 // Flush any outstanding reports to make sure we cover all the nodes.
3469 // This does not cause them to get displayed.
3470 for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I)
3471 const_cast<BugType*>(*I)->FlushReports(BR);
3473 // Iterate through the reports and get their nodes.
3474 for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I) {
3475 for (BugType::const_iterator I2=(*I)->begin(), E2=(*I)->end();
3476 I2!=E2; ++I2) {
3477 const BugReportEquivClass& EQ = *I2;
3478 const BugReport &R = **EQ.begin();
3479 ExplodedNode *N = const_cast<ExplodedNode*>(R.getErrorNode());
3480 if (N) Src.push_back(N);
3484 ViewGraph(&Src[0], &Src[0]+Src.size());
3486 else {
3487 GraphPrintCheckerState = this;
3488 GraphPrintSourceManager = &getContext().getSourceManager();
3490 llvm::ViewGraph(*G.roots_begin(), "ExprEngine");
3492 GraphPrintCheckerState = NULL;
3493 GraphPrintSourceManager = NULL;
3495 #endif
3498 void ExprEngine::ViewGraph(ExplodedNode** Beg, ExplodedNode** End) {
3499 #ifndef NDEBUG
3500 GraphPrintCheckerState = this;
3501 GraphPrintSourceManager = &getContext().getSourceManager();
3503 std::auto_ptr<ExplodedGraph> TrimmedG(G.Trim(Beg, End).first);
3505 if (!TrimmedG.get())
3506 llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n";
3507 else
3508 llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine");
3510 GraphPrintCheckerState = NULL;
3511 GraphPrintSourceManager = NULL;
3512 #endif