//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines a generic engine for intraprocedural, path-sensitive,
// dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/Index/TranslationUnit.h"
#include "clang/AST/Expr.h"
#include "llvm/Support/Casting.h"
#include "llvm/ADT/DenseMap.h"
#include <deque>

using llvm::cast;
using llvm::isa;
using namespace clang;
using namespace ento;

// This should be removed in the future.
namespace clang {
namespace ento {
TransferFuncs* MakeCFRefCountTF(ASTContext& Ctx, bool GCEnabled,
                                const LangOptions& lopts);
}
}

//===----------------------------------------------------------------------===//
// Worklist classes for exploration of reachable states.
//===----------------------------------------------------------------------===//

WorkList::Visitor::~Visitor() {}

namespace {
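// A LIFO worklist: the most recently enqueued unit is dequeued first, giving
// depth-first exploration of the exploded graph.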
class DFS : public WorkList {
  llvm::SmallVector<WorkListUnit,20> Stack;
public:
  virtual bool hasWork() const {
    return !Stack.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Stack.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    assert (!Stack.empty());
    // Copy the unit before popping; the popped element's storage would
    // otherwise be referenced after it is destroyed.
    WorkListUnit U = Stack.back();
    Stack.pop_back();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (llvm::SmallVectorImpl<WorkListUnit>::iterator
         I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

class BFS : public WorkList {
  std::deque<WorkListUnit> Queue;
public:
  virtual bool hasWork() const {
    return !Queue.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Queue.push_front(U);
  }

  virtual WorkListUnit dequeue() {
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (std::deque<WorkListUnit>::iterator
         I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

} // end anonymous namespace

// Place the destructor for WorkList here because it contains virtual member
// functions, and we want the code for the destructor generated in one
// compilation unit.
WorkList::~WorkList() {}

WorkList *WorkList::makeDFS() { return new DFS(); }
WorkList *WorkList::makeBFS() { return new BFS(); }

namespace {
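// A hybrid worklist: block entrances are kept on a queue, while all other
// units go onto a stack that dequeue() drains first, so the contents of the
// current basic block are processed to completion before another block is
// picked up.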
class BFSBlockDFSContents : public WorkList {
  std::deque<WorkListUnit> Queue;
  llvm::SmallVector<WorkListUnit,20> Stack;
public:
  virtual bool hasWork() const {
    return !Queue.empty() || !Stack.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    if (isa<BlockEntrance>(U.getNode()->getLocation()))
      Queue.push_front(U);
    else
      Stack.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    // Process all basic blocks to completion.
    if (!Stack.empty()) {
      // Copy the unit before popping; the popped element's storage would
      // otherwise be referenced after it is destroyed.
      WorkListUnit U = Stack.back();
      Stack.pop_back();
      return U;
    }

    assert(!Queue.empty());
    // Don't use a const reference here; the subsequent pop_front() would
    // invalidate it.
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }
  virtual bool visitItemsInWorkList(Visitor &V) {
    for (llvm::SmallVectorImpl<WorkListUnit>::iterator
         I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    for (std::deque<WorkListUnit>::iterator
         I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }

};
} // end anonymous namespace

WorkList* WorkList::makeBFSBlockDFSContents() {
  return new BFSBlockDFSContents();
}

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 const GRState *InitState) {

  if (G->num_roots() == 0) { // Initialize the analysis by constructing
    // the root if none exists.

    const CFGBlock* Entry = &(L->getCFG()->getEntry());

    assert (Entry->empty() &&
            "Entry block must be empty.");

    assert (Entry->succ_size() == 1 &&
            "Entry block must have 1 successor.");

    // Get the solitary successor.
    const CFGBlock* Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      // Generate the root.
      generateNode(StartLoc, SubEng.getInitialState(L), 0);
    else
      generateNode(StartLoc, InitState, 0);
  }

  // Check if we have a step limit; zero means no limit.
  bool UnlimitedSteps = Steps == 0;

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0)
        break;
      --Steps;
    }

    const WorkListUnit& WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode* Node = WU.getNode();

    // Dispatch on the location type.
    switch (Node->getLocation().getKind()) {
      case ProgramPoint::BlockEdgeKind:
        HandleBlockEdge(cast<BlockEdge>(Node->getLocation()), Node);
        break;

      case ProgramPoint::BlockEntranceKind:
        HandleBlockEntrance(cast<BlockEntrance>(Node->getLocation()), Node);
        break;

      case ProgramPoint::BlockExitKind:
        assert (false && "BlockExit locations never occur in forward analysis.");
        break;

      case ProgramPoint::CallEnterKind:
        HandleCallEnter(cast<CallEnter>(Node->getLocation()), WU.getBlock(),
                        WU.getIndex(), Node);
        break;

      case ProgramPoint::CallExitKind:
        HandleCallExit(cast<CallExit>(Node->getLocation()), Node);
        break;

      default:
        assert(isa<PostStmt>(Node->getLocation()) ||
               isa<PostInitializer>(Node->getLocation()));
        HandlePostStmt(WU.getBlock(), WU.getIndex(), Node);
        break;
    }
  }

  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}

void CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 const GRState *InitState,
                                                 ExplodedNodeSet &Dst) {
  ExecuteWorkList(L, Steps, InitState);
  for (llvm::SmallVectorImpl<ExplodedNode*>::iterator I = G->EndNodes.begin(),
                                          E = G->EndNodes.end(); I != E; ++I) {
    Dst.Add(*I);
  }
}

void CoreEngine::HandleCallEnter(const CallEnter &L, const CFGBlock *Block,
                                 unsigned Index, ExplodedNode *Pred) {
  CallEnterNodeBuilder Builder(*this, Pred, L.getCallExpr(),
                               L.getCalleeContext(), Block, Index);
  SubEng.processCallEnter(Builder);
}

void CoreEngine::HandleCallExit(const CallExit &L, ExplodedNode *Pred) {
  CallExitNodeBuilder Builder(*this, Pred);
  SubEng.processCallExit(Builder);
}

void CoreEngine::HandleBlockEdge(const BlockEdge& L, ExplodedNode* Pred) {

  const CFGBlock* Blk = L.getDst();

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {

    assert (L.getLocationContext()->getCFG()->getExit().size() == 0
            && "EXIT block cannot contain Stmts.");

    // Process the final state transition.
    EndOfFunctionNodeBuilder Builder(Blk, Pred, this);
    SubEng.processEndOfFunction(Builder);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the subengine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  GenericNodeBuilder<BlockEntrance> nodeBuilder(*this, Pred, BE);
  SubEng.processCFGBlockEntrance(dstNodes, nodeBuilder);

  if (dstNodes.empty()) {
    if (!nodeBuilder.hasGeneratedNode) {
      // Auto-generate a node and enqueue it to the worklist.
      generateNode(BE, Pred->State, Pred);
    }
  }
  else {
    for (ExplodedNodeSet::iterator I = dstNodes.begin(), E = dstNodes.end();
         I != E; ++I) {
      WList->enqueue(*I);
    }
  }

  for (llvm::SmallVectorImpl<ExplodedNode*>::const_iterator
       I = nodeBuilder.sinks().begin(), E = nodeBuilder.sinks().end();
       I != E; ++I) {
    blocksExhausted.push_back(std::make_pair(L, *I));
  }
}

void CoreEngine::HandleBlockEntrance(const BlockEntrance& L,
                                     ExplodedNode* Pred) {

  // Increment the block counter.
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter,
                             Pred->getLocationContext()->getCurrentStackFrame(),
                             L.getBlock()->getBlockID());
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
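  // If the block has at least one CFG element, start processing it at
  // index 0; otherwise go straight to the block's terminator.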
  if (CFGElement E = L.getFirstElement()) {
    StmtNodeBuilder Builder(L.getBlock(), 0, Pred, this,
                            SubEng.getStateManager());
    SubEng.processCFGElement(E, Builder);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode* Pred) {

  if (const Stmt* Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
      default:
        assert(false && "Analysis for this terminator not implemented.");
        break;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

        // FIXME: Use constant-folding in CFG construction to simplify this
        // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert (B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        SubEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass: {
        // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), subengines will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements.  We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
        HandleBranch(Term, Term, B, Pred);
        return;
      }

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);

        SubEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;
    }
  }

  assert (B->succ_size() == 1 &&
          "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleBranch(const Stmt* Cond, const Stmt* Term,
                              const CFGBlock * B, ExplodedNode* Pred) {
  assert(B->succ_size() == 2);
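  // The block's first successor is treated as the 'true' branch target and
  // its second successor as the 'false' target (see
  // BranchNodeBuilder::generateNode).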
  BranchNodeBuilder Builder(B, *(B->succ_begin()), *(B->succ_begin()+1),
                            Pred, this);
  SubEng.processBranch(Cond, Term, Builder);
}

void CoreEngine::HandlePostStmt(const CFGBlock* B, unsigned StmtIdx,
                                ExplodedNode* Pred) {
  assert (!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    StmtNodeBuilder Builder(B, StmtIdx, Pred, this,
                            SubEng.getStateManager());
    SubEng.processCFGElement((*B)[StmtIdx], Builder);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint& Loc,
                              const GRState* State, ExplodedNode* Pred) {

  bool IsNew;
  ExplodedNode* Node = G->getNode(Loc, State, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, *G); // Link 'Node' with its predecessor.
  else {
    assert (IsNew);
    G->addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

ExplodedNode *
GenericNodeBuilderImpl::generateNodeImpl(const GRState *state,
                                         ExplodedNode *pred,
                                         ProgramPoint programPoint,
                                         bool asSink) {

  hasGeneratedNode = true;
  bool isNew;
  ExplodedNode *node = engine.getGraph().getNode(programPoint, state, &isNew);
  if (pred)
    node->addPredecessor(pred, engine.getGraph());
  if (isNew) {
    if (asSink) {
      node->markAsSink();
      sinksGenerated.push_back(node);
    }
    return node;
  }
  return 0;
}

StmtNodeBuilder::StmtNodeBuilder(const CFGBlock* b, unsigned idx,
                                 ExplodedNode* N, CoreEngine* e,
                                 GRStateManager &mgr)
  : Eng(*e), B(*b), Idx(idx), Pred(N), Mgr(mgr),
    PurgingDeadSymbols(false), BuildSinks(false), hasGeneratedNode(false),
    PointKind(ProgramPoint::PostStmtKind), Tag(0) {
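  // Nodes left in 'Deferred' when this builder is destroyed receive an
  // automatic transition to the next CFG element (see ~StmtNodeBuilder and
  // GenerateAutoTransition).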
  Deferred.insert(N);
  CleanedState = Pred->getState();
}

StmtNodeBuilder::~StmtNodeBuilder() {
  for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I)
    if (!(*I)->isSink())
      GenerateAutoTransition(*I);
}

void StmtNodeBuilder::GenerateAutoTransition(ExplodedNode* N) {
  assert (!N->isSink());

  // Check if this node entered a callee.
  if (isa<CallEnter>(N->getLocation())) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    Eng.WList->enqueue(N, &B, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (isa<PostInitializer>(N->getLocation())) {
    Eng.WList->enqueue(N, &B, Idx+1);
    return;
  }

  PostStmt Loc(getStmt(), N->getLocationContext());

  if (Loc == N->getLocation()) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    Eng.WList->enqueue(N, &B, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode* Succ = Eng.G->getNode(Loc, N->State, &IsNew);
  Succ->addPredecessor(N, *Eng.G);

  if (IsNew)
    Eng.WList->enqueue(Succ, &B, Idx+1);
}

ExplodedNode* StmtNodeBuilder::MakeNode(ExplodedNodeSet& Dst, const Stmt* S,
                                        ExplodedNode* Pred, const GRState* St,
                                        ProgramPoint::Kind K) {

  ExplodedNode* N = generateNode(S, St, Pred, K);

  if (N) {
    if (BuildSinks)
      N->markAsSink();
    else
      Dst.Add(N);
  }

  return N;
}

static ProgramPoint GetProgramPoint(const Stmt *S, ProgramPoint::Kind K,
                                    const LocationContext *LC, const void *tag){
  switch (K) {
    default:
      assert(false && "Unhandled ProgramPoint kind");
    case ProgramPoint::PreStmtKind:
      return PreStmt(S, LC, tag);
    case ProgramPoint::PostStmtKind:
      return PostStmt(S, LC, tag);
    case ProgramPoint::PreLoadKind:
      return PreLoad(S, LC, tag);
    case ProgramPoint::PostLoadKind:
      return PostLoad(S, LC, tag);
    case ProgramPoint::PreStoreKind:
      return PreStore(S, LC, tag);
    case ProgramPoint::PostStoreKind:
      return PostStore(S, LC, tag);
    case ProgramPoint::PostLValueKind:
      return PostLValue(S, LC, tag);
    case ProgramPoint::PostPurgeDeadSymbolsKind:
      return PostPurgeDeadSymbols(S, LC, tag);
  }
}

ExplodedNode*
StmtNodeBuilder::generateNodeInternal(const Stmt* S, const GRState* state,
                                      ExplodedNode* Pred,
                                      ProgramPoint::Kind K,
                                      const void *tag) {

  const ProgramPoint &L = GetProgramPoint(S, K, Pred->getLocationContext(),tag);
  return generateNodeInternal(L, state, Pred);
}

ExplodedNode*
StmtNodeBuilder::generateNodeInternal(const ProgramPoint &Loc,
                                      const GRState* State,
                                      ExplodedNode* Pred) {
  bool IsNew;
  ExplodedNode* N = Eng.G->getNode(Loc, State, &IsNew);
  N->addPredecessor(Pred, *Eng.G);
  Deferred.erase(Pred);

  if (IsNew) {
    Deferred.insert(N);
    return N;
  }

  return NULL;
}

// This function generates a new ExplodedNode but not a new branch (block
// edge).
ExplodedNode* BranchNodeBuilder::generateNode(const Stmt* Condition,
                                              const GRState* State) {
  bool IsNew;

  ExplodedNode* Succ
    = Eng.G->getNode(PostCondition(Condition, Pred->getLocationContext()),
                     State, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  Pred = Succ;

  if (IsNew)
    return Succ;

  return NULL;
}

ExplodedNode* BranchNodeBuilder::generateNode(const GRState* State,
                                              bool branch) {

  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return NULL;

  bool IsNew;

  ExplodedNode* Succ =
    Eng.G->getNode(BlockEdge(Src, branch ? DstT : DstF,
                             Pred->getLocationContext()),
                   State, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  if (branch)
    GeneratedTrue = true;
  else
    GeneratedFalse = true;

  if (IsNew) {
    Deferred.push_back(Succ);
    return Succ;
  }

  return NULL;
}

BranchNodeBuilder::~BranchNodeBuilder() {
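  // If the subengine did not generate a node for one of the branches,
  // auto-propagate the predecessor's state along that edge before flushing
  // the deferred nodes to the worklist.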
  if (!GeneratedTrue) generateNode(Pred->State, true);
  if (!GeneratedFalse) generateNode(Pred->State, false);

  for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I)
    if (!(*I)->isSink()) Eng.WList->enqueue(*I);
}


ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator& I, const GRState* St,
                                      bool isSink) {
  bool IsNew;

  ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {

    if (isSink)
      Succ->markAsSink();
    else
      Eng.WList->enqueue(Succ);

    return Succ;
  }

  return NULL;
}


ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator& I, const GRState* St){

  bool IsNew;

  ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {
    Eng.WList->enqueue(Succ);
    return Succ;
  }

  return NULL;
}


ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(const GRState* St, bool isSink) {

  // Get the block for the default case.
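  // The default case is laid out as the last successor of the switch block,
  // hence the use of succ_rbegin().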
  assert (Src->succ_rbegin() != Src->succ_rend());
  CFGBlock* DefaultBlock = *Src->succ_rbegin();

  bool IsNew;

  ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock,
                                      Pred->getLocationContext()), St, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {
    if (isSink)
      Succ->markAsSink();
    else
      Eng.WList->enqueue(Succ);

    return Succ;
  }

  return NULL;
}

EndOfFunctionNodeBuilder::~EndOfFunctionNodeBuilder() {
  // Auto-generate an EOP node if one has not been generated.
  if (!hasGeneratedNode) {
    // If we are in an inlined call, generate CallExit node.
    if (Pred->getLocationContext()->getParent())
      GenerateCallExitNode(Pred->State);
    else
      generateNode(Pred->State);
  }
}

ExplodedNode*
EndOfFunctionNodeBuilder::generateNode(const GRState* State,
                                       ExplodedNode* P, const void *tag) {
  hasGeneratedNode = true;
  bool IsNew;

  ExplodedNode* Node = Eng.G->getNode(BlockEntrance(&B,
                               Pred->getLocationContext(), tag ? tag : Tag),
                               State, &IsNew);

  Node->addPredecessor(P ? P : Pred, *Eng.G);

  if (IsNew) {
    Eng.G->addEndOfPath(Node);
    return Node;
  }

  return NULL;
}

void EndOfFunctionNodeBuilder::GenerateCallExitNode(const GRState *state) {
  hasGeneratedNode = true;
  // Create a CallExit node and enqueue it.
  const StackFrameContext *LocCtx
    = cast<StackFrameContext>(Pred->getLocationContext());
  const Stmt *CE = LocCtx->getCallSite();

  // Use the callee location context.
  CallExit Loc(CE, LocCtx);

  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(Pred, *Eng.G);

  if (isNew)
    Eng.WList->enqueue(Node);
}


void CallEnterNodeBuilder::generateNode(const GRState *state) {
  // Check if the callee is in the same translation unit.
  if (CalleeCtx->getTranslationUnit() !=
      Pred->getLocationContext()->getTranslationUnit()) {
    // Create a new engine. We must be careful that the new engine does not
    // reference data structures owned by the old engine.

    AnalysisManager &OldMgr = Eng.SubEng.getAnalysisManager();

    // Get the callee's translation unit.
    idx::TranslationUnit *TU = CalleeCtx->getTranslationUnit();

    // Create a new AnalysisManager with components of the callee's
    // TranslationUnit.
    // The Diagnostic is actually shared when we create ASTUnits from AST files.
    AnalysisManager AMgr(TU->getASTContext(), TU->getDiagnostic(),
                         OldMgr.getLangOptions(),
                         OldMgr.getPathDiagnosticClient(),
                         OldMgr.getStoreManagerCreator(),
                         OldMgr.getConstraintManagerCreator(),
                         OldMgr.getCheckerManager(),
                         OldMgr.getIndexer(),
                         OldMgr.getMaxNodes(), OldMgr.getMaxVisit(),
                         OldMgr.shouldVisualizeGraphviz(),
                         OldMgr.shouldVisualizeUbigraph(),
                         OldMgr.shouldPurgeDead(),
                         OldMgr.shouldEagerlyAssume(),
                         OldMgr.shouldTrimGraph(),
                         OldMgr.shouldInlineCall(),
                         OldMgr.getAnalysisContextManager().getUseUnoptimizedCFG(),
                         OldMgr.getAnalysisContextManager().getAddImplicitDtors(),
                         OldMgr.getAnalysisContextManager().getAddInitializers(),
                         OldMgr.shouldEagerlyTrimExplodedGraph());
    llvm::OwningPtr<TransferFuncs> TF(MakeCFRefCountTF(AMgr.getASTContext(),
                                                       /* GCEnabled */ false,
                                                       AMgr.getLangOptions()));
    // Create the new engine.
    ExprEngine NewEng(AMgr, TF.take());

    // Create the new LocationContext.
    AnalysisContext *NewAnaCtx = AMgr.getAnalysisContext(CalleeCtx->getDecl(),
                                               CalleeCtx->getTranslationUnit());
    const StackFrameContext *OldLocCtx = CalleeCtx;
    const StackFrameContext *NewLocCtx = AMgr.getStackFrame(NewAnaCtx,
                                               OldLocCtx->getParent(),
                                               OldLocCtx->getCallSite(),
                                               OldLocCtx->getCallSiteBlock(),
                                               OldLocCtx->getIndex());

    // Now create an initial state for the new engine.
    const GRState *NewState = NewEng.getStateManager().MarshalState(state,
                                                                    NewLocCtx);
    ExplodedNodeSet ReturnNodes;
    NewEng.ExecuteWorkListWithInitialState(NewLocCtx, AMgr.getMaxNodes(),
                                           NewState, ReturnNodes);
    return;
  }

  // Get the callee entry block.
  const CFGBlock *Entry = &(CalleeCtx->getCFG()->getEntry());
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *SuccB = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, SuccB, CalleeCtx);

  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);

  if (isNew)
    Eng.WList->enqueue(Node);
}

void CallExitNodeBuilder::generateNode(const GRState *state) {
  // Get the callee's location context.
  const StackFrameContext *LocCtx
    = cast<StackFrameContext>(Pred->getLocationContext());
  // When exiting an implicit automatic obj dtor call, the callsite is the Stmt
  // that triggers the dtor.
  PostStmt Loc(LocCtx->getCallSite(), LocCtx->getParent());
  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);
  if (isNew)
    Eng.WList->enqueue(Node, LocCtx->getCallSiteBlock(),
                       LocCtx->getIndex() + 1);
}