[analyzer] Refactoring: Move stuff into namespace 'GR'.
[clang.git] / lib / GR / GRCoreEngine.cpp
blob092cb460aef9d5fd12a7f3651177283103ee05cc
1 //==- GRCoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file defines a generic engine for intraprocedural, path-sensitive,
11 // dataflow analysis via graph reachability engine.
13 //===----------------------------------------------------------------------===//
15 #include "clang/GR/PathSensitive/AnalysisManager.h"
16 #include "clang/GR/PathSensitive/GRCoreEngine.h"
17 #include "clang/GR/PathSensitive/GRExprEngine.h"
18 #include "clang/Index/TranslationUnit.h"
19 #include "clang/AST/Expr.h"
20 #include "llvm/Support/Casting.h"
21 #include "llvm/ADT/DenseMap.h"
22 #include <vector>
23 #include <queue>
25 using llvm::cast;
26 using llvm::isa;
27 using namespace clang;
28 using namespace GR;
30 // This should be removed in the future.
31 namespace clang {
32 namespace GR {
33 GRTransferFuncs* MakeCFRefCountTF(ASTContext& Ctx, bool GCEnabled,
34 const LangOptions& lopts);
38 //===----------------------------------------------------------------------===//
39 // Worklist classes for exploration of reachable states.
40 //===----------------------------------------------------------------------===//
// Out-of-line definition anchors the Visitor vtable to this translation unit.
GRWorkList::Visitor::~Visitor() {}
44 namespace {
45 class DFS : public GRWorkList {
46 llvm::SmallVector<GRWorkListUnit,20> Stack;
47 public:
48 virtual bool hasWork() const {
49 return !Stack.empty();
52 virtual void Enqueue(const GRWorkListUnit& U) {
53 Stack.push_back(U);
56 virtual GRWorkListUnit Dequeue() {
57 assert (!Stack.empty());
58 const GRWorkListUnit& U = Stack.back();
59 Stack.pop_back(); // This technically "invalidates" U, but we are fine.
60 return U;
63 virtual bool VisitItemsInWorkList(Visitor &V) {
64 for (llvm::SmallVectorImpl<GRWorkListUnit>::iterator
65 I = Stack.begin(), E = Stack.end(); I != E; ++I) {
66 if (V.Visit(*I))
67 return true;
69 return false;
73 class BFS : public GRWorkList {
74 std::deque<GRWorkListUnit> Queue;
75 public:
76 virtual bool hasWork() const {
77 return !Queue.empty();
80 virtual void Enqueue(const GRWorkListUnit& U) {
81 Queue.push_front(U);
84 virtual GRWorkListUnit Dequeue() {
85 GRWorkListUnit U = Queue.front();
86 Queue.pop_front();
87 return U;
90 virtual bool VisitItemsInWorkList(Visitor &V) {
91 for (std::deque<GRWorkListUnit>::iterator
92 I = Queue.begin(), E = Queue.end(); I != E; ++I) {
93 if (V.Visit(*I))
94 return true;
96 return false;
100 } // end anonymous namespace
// Place the dtor for GRWorkList here because it contains virtual member
// functions, and we want the code for the dtor generated in one
// compilation unit (the vtable anchor).
GRWorkList::~GRWorkList() {}
// Factory for a depth-first worklist; caller owns the result.
GRWorkList *GRWorkList::MakeDFS() { return new DFS(); }
// Factory for a breadth-first worklist; caller owns the result.
GRWorkList *GRWorkList::MakeBFS() { return new BFS(); }
109 namespace {
110 class BFSBlockDFSContents : public GRWorkList {
111 std::deque<GRWorkListUnit> Queue;
112 llvm::SmallVector<GRWorkListUnit,20> Stack;
113 public:
114 virtual bool hasWork() const {
115 return !Queue.empty() || !Stack.empty();
118 virtual void Enqueue(const GRWorkListUnit& U) {
119 if (isa<BlockEntrance>(U.getNode()->getLocation()))
120 Queue.push_front(U);
121 else
122 Stack.push_back(U);
125 virtual GRWorkListUnit Dequeue() {
126 // Process all basic blocks to completion.
127 if (!Stack.empty()) {
128 const GRWorkListUnit& U = Stack.back();
129 Stack.pop_back(); // This technically "invalidates" U, but we are fine.
130 return U;
133 assert(!Queue.empty());
134 // Don't use const reference. The subsequent pop_back() might make it
135 // unsafe.
136 GRWorkListUnit U = Queue.front();
137 Queue.pop_front();
138 return U;
140 virtual bool VisitItemsInWorkList(Visitor &V) {
141 for (llvm::SmallVectorImpl<GRWorkListUnit>::iterator
142 I = Stack.begin(), E = Stack.end(); I != E; ++I) {
143 if (V.Visit(*I))
144 return true;
146 for (std::deque<GRWorkListUnit>::iterator
147 I = Queue.begin(), E = Queue.end(); I != E; ++I) {
148 if (V.Visit(*I))
149 return true;
151 return false;
155 } // end anonymous namespace
157 GRWorkList* GRWorkList::MakeBFSBlockDFSContents() {
158 return new BFSBlockDFSContents();
161 //===----------------------------------------------------------------------===//
162 // Core analysis engine.
163 //===----------------------------------------------------------------------===//
165 /// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
166 bool GRCoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
167 const GRState *InitState) {
169 if (G->num_roots() == 0) { // Initialize the analysis by constructing
170 // the root if none exists.
172 const CFGBlock* Entry = &(L->getCFG()->getEntry());
174 assert (Entry->empty() &&
175 "Entry block must be empty.");
177 assert (Entry->succ_size() == 1 &&
178 "Entry block must have 1 successor.");
180 // Get the solitary successor.
181 const CFGBlock* Succ = *(Entry->succ_begin());
183 // Construct an edge representing the
184 // starting location in the function.
185 BlockEdge StartLoc(Entry, Succ, L);
187 // Set the current block counter to being empty.
188 WList->setBlockCounter(BCounterFactory.GetEmptyCounter());
190 if (!InitState)
191 // Generate the root.
192 generateNode(StartLoc, getInitialState(L), 0);
193 else
194 generateNode(StartLoc, InitState, 0);
197 // Check if we have a steps limit
198 bool UnlimitedSteps = Steps == 0;
200 while (WList->hasWork()) {
201 if (!UnlimitedSteps) {
202 if (Steps == 0)
203 break;
204 --Steps;
207 const GRWorkListUnit& WU = WList->Dequeue();
209 // Set the current block counter.
210 WList->setBlockCounter(WU.getBlockCounter());
212 // Retrieve the node.
213 ExplodedNode* Node = WU.getNode();
215 // Dispatch on the location type.
216 switch (Node->getLocation().getKind()) {
217 case ProgramPoint::BlockEdgeKind:
218 HandleBlockEdge(cast<BlockEdge>(Node->getLocation()), Node);
219 break;
221 case ProgramPoint::BlockEntranceKind:
222 HandleBlockEntrance(cast<BlockEntrance>(Node->getLocation()), Node);
223 break;
225 case ProgramPoint::BlockExitKind:
226 assert (false && "BlockExit location never occur in forward analysis.");
227 break;
229 case ProgramPoint::CallEnterKind:
230 HandleCallEnter(cast<CallEnter>(Node->getLocation()), WU.getBlock(),
231 WU.getIndex(), Node);
232 break;
234 case ProgramPoint::CallExitKind:
235 HandleCallExit(cast<CallExit>(Node->getLocation()), Node);
236 break;
238 default:
239 assert(isa<PostStmt>(Node->getLocation()) ||
240 isa<PostInitializer>(Node->getLocation()));
241 HandlePostStmt(WU.getBlock(), WU.getIndex(), Node);
242 break;
246 SubEngine.ProcessEndWorklist(hasWorkRemaining());
247 return WList->hasWork();
250 void GRCoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
251 unsigned Steps,
252 const GRState *InitState,
253 ExplodedNodeSet &Dst) {
254 ExecuteWorkList(L, Steps, InitState);
255 for (llvm::SmallVectorImpl<ExplodedNode*>::iterator I = G->EndNodes.begin(),
256 E = G->EndNodes.end(); I != E; ++I) {
257 Dst.Add(*I);
261 void GRCoreEngine::HandleCallEnter(const CallEnter &L, const CFGBlock *Block,
262 unsigned Index, ExplodedNode *Pred) {
263 GRCallEnterNodeBuilder Builder(*this, Pred, L.getCallExpr(),
264 L.getCalleeContext(), Block, Index);
265 ProcessCallEnter(Builder);
268 void GRCoreEngine::HandleCallExit(const CallExit &L, ExplodedNode *Pred) {
269 GRCallExitNodeBuilder Builder(*this, Pred);
270 ProcessCallExit(Builder);
273 void GRCoreEngine::HandleBlockEdge(const BlockEdge& L, ExplodedNode* Pred) {
275 const CFGBlock* Blk = L.getDst();
277 // Check if we are entering the EXIT block.
278 if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
280 assert (L.getLocationContext()->getCFG()->getExit().size() == 0
281 && "EXIT block cannot contain Stmts.");
283 // Process the final state transition.
284 GREndPathNodeBuilder Builder(Blk, Pred, this);
285 ProcessEndPath(Builder);
287 // This path is done. Don't enqueue any more nodes.
288 return;
291 // FIXME: Should we allow ProcessBlockEntrance to also manipulate state?
293 if (ProcessBlockEntrance(Blk, Pred, WList->getBlockCounter()))
294 generateNode(BlockEntrance(Blk, Pred->getLocationContext()),
295 Pred->State, Pred);
296 else {
297 blocksAborted.push_back(std::make_pair(L, Pred));
301 void GRCoreEngine::HandleBlockEntrance(const BlockEntrance& L,
302 ExplodedNode* Pred) {
304 // Increment the block counter.
305 GRBlockCounter Counter = WList->getBlockCounter();
306 Counter = BCounterFactory.IncrementCount(Counter,
307 Pred->getLocationContext()->getCurrentStackFrame(),
308 L.getBlock()->getBlockID());
309 WList->setBlockCounter(Counter);
311 // Process the entrance of the block.
312 if (CFGElement E = L.getFirstElement()) {
313 GRStmtNodeBuilder Builder(L.getBlock(), 0, Pred, this,
314 SubEngine.getStateManager());
315 ProcessElement(E, Builder);
317 else
318 HandleBlockExit(L.getBlock(), Pred);
321 void GRCoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode* Pred) {
323 if (const Stmt* Term = B->getTerminator()) {
324 switch (Term->getStmtClass()) {
325 default:
326 assert(false && "Analysis for this terminator not implemented.");
327 break;
329 case Stmt::BinaryOperatorClass: // '&&' and '||'
330 HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
331 return;
333 case Stmt::ConditionalOperatorClass:
334 HandleBranch(cast<ConditionalOperator>(Term)->getCond(), Term, B, Pred);
335 return;
337 // FIXME: Use constant-folding in CFG construction to simplify this
338 // case.
340 case Stmt::ChooseExprClass:
341 HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
342 return;
344 case Stmt::DoStmtClass:
345 HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
346 return;
348 case Stmt::ForStmtClass:
349 HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
350 return;
352 case Stmt::ContinueStmtClass:
353 case Stmt::BreakStmtClass:
354 case Stmt::GotoStmtClass:
355 break;
357 case Stmt::IfStmtClass:
358 HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
359 return;
361 case Stmt::IndirectGotoStmtClass: {
362 // Only 1 successor: the indirect goto dispatch block.
363 assert (B->succ_size() == 1);
365 GRIndirectGotoNodeBuilder
366 builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
367 *(B->succ_begin()), this);
369 ProcessIndirectGoto(builder);
370 return;
373 case Stmt::ObjCForCollectionStmtClass: {
374 // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
376 // (1) inside a basic block, which represents the binding of the
377 // 'element' variable to a value.
378 // (2) in a terminator, which represents the branch.
380 // For (1), subengines will bind a value (i.e., 0 or 1) indicating
381 // whether or not collection contains any more elements. We cannot
382 // just test to see if the element is nil because a container can
383 // contain nil elements.
384 HandleBranch(Term, Term, B, Pred);
385 return;
388 case Stmt::SwitchStmtClass: {
389 GRSwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
390 this);
392 ProcessSwitch(builder);
393 return;
396 case Stmt::WhileStmtClass:
397 HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
398 return;
402 assert (B->succ_size() == 1 &&
403 "Blocks with no terminator should have at most 1 successor.");
405 generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
406 Pred->State, Pred);
409 void GRCoreEngine::HandleBranch(const Stmt* Cond, const Stmt* Term,
410 const CFGBlock * B, ExplodedNode* Pred) {
411 assert (B->succ_size() == 2);
413 GRBranchNodeBuilder Builder(B, *(B->succ_begin()), *(B->succ_begin()+1),
414 Pred, this);
416 ProcessBranch(Cond, Term, Builder);
419 void GRCoreEngine::HandlePostStmt(const CFGBlock* B, unsigned StmtIdx,
420 ExplodedNode* Pred) {
421 assert (!B->empty());
423 if (StmtIdx == B->size())
424 HandleBlockExit(B, Pred);
425 else {
426 GRStmtNodeBuilder Builder(B, StmtIdx, Pred, this,
427 SubEngine.getStateManager());
428 ProcessElement((*B)[StmtIdx], Builder);
432 /// generateNode - Utility method to generate nodes, hook up successors,
433 /// and add nodes to the worklist.
434 void GRCoreEngine::generateNode(const ProgramPoint& Loc,
435 const GRState* State, ExplodedNode* Pred) {
437 bool IsNew;
438 ExplodedNode* Node = G->getNode(Loc, State, &IsNew);
440 if (Pred)
441 Node->addPredecessor(Pred, *G); // Link 'Node' with its predecessor.
442 else {
443 assert (IsNew);
444 G->addRoot(Node); // 'Node' has no predecessor. Make it a root.
447 // Only add 'Node' to the worklist if it was freshly generated.
448 if (IsNew) WList->Enqueue(Node);
451 GRStmtNodeBuilder::GRStmtNodeBuilder(const CFGBlock* b, unsigned idx,
452 ExplodedNode* N, GRCoreEngine* e,
453 GRStateManager &mgr)
454 : Eng(*e), B(*b), Idx(idx), Pred(N), Mgr(mgr),
455 PurgingDeadSymbols(false), BuildSinks(false), HasGeneratedNode(false),
456 PointKind(ProgramPoint::PostStmtKind), Tag(0) {
457 Deferred.insert(N);
458 CleanedState = Pred->getState();
461 GRStmtNodeBuilder::~GRStmtNodeBuilder() {
462 for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I)
463 if (!(*I)->isSink())
464 GenerateAutoTransition(*I);
467 void GRStmtNodeBuilder::GenerateAutoTransition(ExplodedNode* N) {
468 assert (!N->isSink());
470 // Check if this node entered a callee.
471 if (isa<CallEnter>(N->getLocation())) {
472 // Still use the index of the CallExpr. It's needed to create the callee
473 // StackFrameContext.
474 Eng.WList->Enqueue(N, &B, Idx);
475 return;
478 // Do not create extra nodes. Move to the next CFG element.
479 if (isa<PostInitializer>(N->getLocation())) {
480 Eng.WList->Enqueue(N, &B, Idx+1);
481 return;
484 PostStmt Loc(getStmt(), N->getLocationContext());
486 if (Loc == N->getLocation()) {
487 // Note: 'N' should be a fresh node because otherwise it shouldn't be
488 // a member of Deferred.
489 Eng.WList->Enqueue(N, &B, Idx+1);
490 return;
493 bool IsNew;
494 ExplodedNode* Succ = Eng.G->getNode(Loc, N->State, &IsNew);
495 Succ->addPredecessor(N, *Eng.G);
497 if (IsNew)
498 Eng.WList->Enqueue(Succ, &B, Idx+1);
501 ExplodedNode* GRStmtNodeBuilder::MakeNode(ExplodedNodeSet& Dst, const Stmt* S,
502 ExplodedNode* Pred, const GRState* St,
503 ProgramPoint::Kind K) {
505 ExplodedNode* N = generateNode(S, St, Pred, K);
507 if (N) {
508 if (BuildSinks)
509 N->markAsSink();
510 else
511 Dst.Add(N);
514 return N;
517 static ProgramPoint GetProgramPoint(const Stmt *S, ProgramPoint::Kind K,
518 const LocationContext *LC, const void *tag){
519 switch (K) {
520 default:
521 assert(false && "Unhandled ProgramPoint kind");
522 case ProgramPoint::PreStmtKind:
523 return PreStmt(S, LC, tag);
524 case ProgramPoint::PostStmtKind:
525 return PostStmt(S, LC, tag);
526 case ProgramPoint::PreLoadKind:
527 return PreLoad(S, LC, tag);
528 case ProgramPoint::PostLoadKind:
529 return PostLoad(S, LC, tag);
530 case ProgramPoint::PreStoreKind:
531 return PreStore(S, LC, tag);
532 case ProgramPoint::PostStoreKind:
533 return PostStore(S, LC, tag);
534 case ProgramPoint::PostLValueKind:
535 return PostLValue(S, LC, tag);
536 case ProgramPoint::PostPurgeDeadSymbolsKind:
537 return PostPurgeDeadSymbols(S, LC, tag);
541 ExplodedNode*
542 GRStmtNodeBuilder::generateNodeInternal(const Stmt* S, const GRState* state,
543 ExplodedNode* Pred,
544 ProgramPoint::Kind K,
545 const void *tag) {
547 const ProgramPoint &L = GetProgramPoint(S, K, Pred->getLocationContext(),tag);
548 return generateNodeInternal(L, state, Pred);
551 ExplodedNode*
552 GRStmtNodeBuilder::generateNodeInternal(const ProgramPoint &Loc,
553 const GRState* State,
554 ExplodedNode* Pred) {
555 bool IsNew;
556 ExplodedNode* N = Eng.G->getNode(Loc, State, &IsNew);
557 N->addPredecessor(Pred, *Eng.G);
558 Deferred.erase(Pred);
560 if (IsNew) {
561 Deferred.insert(N);
562 return N;
565 return NULL;
568 ExplodedNode* GRBranchNodeBuilder::generateNode(const GRState* State,
569 bool branch) {
571 // If the branch has been marked infeasible we should not generate a node.
572 if (!isFeasible(branch))
573 return NULL;
575 bool IsNew;
577 ExplodedNode* Succ =
578 Eng.G->getNode(BlockEdge(Src,branch ? DstT:DstF,Pred->getLocationContext()),
579 State, &IsNew);
581 Succ->addPredecessor(Pred, *Eng.G);
583 if (branch)
584 GeneratedTrue = true;
585 else
586 GeneratedFalse = true;
588 if (IsNew) {
589 Deferred.push_back(Succ);
590 return Succ;
593 return NULL;
596 GRBranchNodeBuilder::~GRBranchNodeBuilder() {
597 if (!GeneratedTrue) generateNode(Pred->State, true);
598 if (!GeneratedFalse) generateNode(Pred->State, false);
600 for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I)
601 if (!(*I)->isSink()) Eng.WList->Enqueue(*I);
605 ExplodedNode*
606 GRIndirectGotoNodeBuilder::generateNode(const iterator& I, const GRState* St,
607 bool isSink) {
608 bool IsNew;
610 ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
611 Pred->getLocationContext()), St, &IsNew);
613 Succ->addPredecessor(Pred, *Eng.G);
615 if (IsNew) {
617 if (isSink)
618 Succ->markAsSink();
619 else
620 Eng.WList->Enqueue(Succ);
622 return Succ;
625 return NULL;
629 ExplodedNode*
630 GRSwitchNodeBuilder::generateCaseStmtNode(const iterator& I, const GRState* St){
632 bool IsNew;
634 ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
635 Pred->getLocationContext()), St, &IsNew);
636 Succ->addPredecessor(Pred, *Eng.G);
638 if (IsNew) {
639 Eng.WList->Enqueue(Succ);
640 return Succ;
643 return NULL;
647 ExplodedNode*
648 GRSwitchNodeBuilder::generateDefaultCaseNode(const GRState* St, bool isSink) {
650 // Get the block for the default case.
651 assert (Src->succ_rbegin() != Src->succ_rend());
652 CFGBlock* DefaultBlock = *Src->succ_rbegin();
654 bool IsNew;
656 ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock,
657 Pred->getLocationContext()), St, &IsNew);
658 Succ->addPredecessor(Pred, *Eng.G);
660 if (IsNew) {
661 if (isSink)
662 Succ->markAsSink();
663 else
664 Eng.WList->Enqueue(Succ);
666 return Succ;
669 return NULL;
672 GREndPathNodeBuilder::~GREndPathNodeBuilder() {
673 // Auto-generate an EOP node if one has not been generated.
674 if (!HasGeneratedNode) {
675 // If we are in an inlined call, generate CallExit node.
676 if (Pred->getLocationContext()->getParent())
677 GenerateCallExitNode(Pred->State);
678 else
679 generateNode(Pred->State);
683 ExplodedNode*
684 GREndPathNodeBuilder::generateNode(const GRState* State, const void *tag,
685 ExplodedNode* P) {
686 HasGeneratedNode = true;
687 bool IsNew;
689 ExplodedNode* Node = Eng.G->getNode(BlockEntrance(&B,
690 Pred->getLocationContext(), tag), State, &IsNew);
692 Node->addPredecessor(P ? P : Pred, *Eng.G);
694 if (IsNew) {
695 Eng.G->addEndOfPath(Node);
696 return Node;
699 return NULL;
702 void GREndPathNodeBuilder::GenerateCallExitNode(const GRState *state) {
703 HasGeneratedNode = true;
704 // Create a CallExit node and enqueue it.
705 const StackFrameContext *LocCtx
706 = cast<StackFrameContext>(Pred->getLocationContext());
707 const Stmt *CE = LocCtx->getCallSite();
709 // Use the the callee location context.
710 CallExit Loc(CE, LocCtx);
712 bool isNew;
713 ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
714 Node->addPredecessor(Pred, *Eng.G);
716 if (isNew)
717 Eng.WList->Enqueue(Node);
721 void GRCallEnterNodeBuilder::generateNode(const GRState *state) {
722 // Check if the callee is in the same translation unit.
723 if (CalleeCtx->getTranslationUnit() !=
724 Pred->getLocationContext()->getTranslationUnit()) {
725 // Create a new engine. We must be careful that the new engine should not
726 // reference data structures owned by the old engine.
728 AnalysisManager &OldMgr = Eng.SubEngine.getAnalysisManager();
730 // Get the callee's translation unit.
731 idx::TranslationUnit *TU = CalleeCtx->getTranslationUnit();
733 // Create a new AnalysisManager with components of the callee's
734 // TranslationUnit.
735 // The Diagnostic is actually shared when we create ASTUnits from AST files.
736 AnalysisManager AMgr(TU->getASTContext(), TU->getDiagnostic(),
737 OldMgr.getLangOptions(),
738 OldMgr.getPathDiagnosticClient(),
739 OldMgr.getStoreManagerCreator(),
740 OldMgr.getConstraintManagerCreator(),
741 OldMgr.getIndexer(),
742 OldMgr.getMaxNodes(), OldMgr.getMaxVisit(),
743 OldMgr.shouldVisualizeGraphviz(),
744 OldMgr.shouldVisualizeUbigraph(),
745 OldMgr.shouldPurgeDead(),
746 OldMgr.shouldEagerlyAssume(),
747 OldMgr.shouldTrimGraph(),
748 OldMgr.shouldInlineCall(),
749 OldMgr.getAnalysisContextManager().getUseUnoptimizedCFG(),
750 OldMgr.getAnalysisContextManager().getAddImplicitDtors(),
751 OldMgr.getAnalysisContextManager().getAddInitializers());
752 llvm::OwningPtr<GRTransferFuncs> TF(MakeCFRefCountTF(AMgr.getASTContext(),
753 /* GCEnabled */ false,
754 AMgr.getLangOptions()));
755 // Create the new engine.
756 GRExprEngine NewEng(AMgr, TF.take());
758 // Create the new LocationContext.
759 AnalysisContext *NewAnaCtx = AMgr.getAnalysisContext(CalleeCtx->getDecl(),
760 CalleeCtx->getTranslationUnit());
761 const StackFrameContext *OldLocCtx = CalleeCtx;
762 const StackFrameContext *NewLocCtx = AMgr.getStackFrame(NewAnaCtx,
763 OldLocCtx->getParent(),
764 OldLocCtx->getCallSite(),
765 OldLocCtx->getCallSiteBlock(),
766 OldLocCtx->getIndex());
768 // Now create an initial state for the new engine.
769 const GRState *NewState = NewEng.getStateManager().MarshalState(state,
770 NewLocCtx);
771 ExplodedNodeSet ReturnNodes;
772 NewEng.ExecuteWorkListWithInitialState(NewLocCtx, AMgr.getMaxNodes(),
773 NewState, ReturnNodes);
774 return;
777 // Get the callee entry block.
778 const CFGBlock *Entry = &(CalleeCtx->getCFG()->getEntry());
779 assert(Entry->empty());
780 assert(Entry->succ_size() == 1);
782 // Get the solitary successor.
783 const CFGBlock *SuccB = *(Entry->succ_begin());
785 // Construct an edge representing the starting location in the callee.
786 BlockEdge Loc(Entry, SuccB, CalleeCtx);
788 bool isNew;
789 ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
790 Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);
792 if (isNew)
793 Eng.WList->Enqueue(Node);
796 void GRCallExitNodeBuilder::generateNode(const GRState *state) {
797 // Get the callee's location context.
798 const StackFrameContext *LocCtx
799 = cast<StackFrameContext>(Pred->getLocationContext());
800 // When exiting an implicit automatic obj dtor call, the callsite is the Stmt
801 // that triggers the dtor.
802 PostStmt Loc(LocCtx->getCallSite(), LocCtx->getParent());
803 bool isNew;
804 ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
805 Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);
806 if (isNew)
807 Eng.WList->Enqueue(Node, LocCtx->getCallSiteBlock(),
808 LocCtx->getIndex() + 1);