//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via a graph reachability engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
            "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
            "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
            "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

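// Create the worklist implementing the exploration strategy selected in
// AnalyzerOptions (DFS, BFS, unexplored-first, etc.).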
static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
  switch (Opts.getExplorationStrategy()) {
  case ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  case ExplorationStrategyKind::UnexploredFirstQueue:
    return WorkList::makeUnexploredFirstPriorityQueue();
  case ExplorationStrategyKind::UnexploredFirstLocationQueue:
    return WorkList::makeUnexploredFirstPriorityLocationQueue();
  }
  llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
}

CoreEngine::CoreEngine(ExprEngine &exprengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : ExprEng(exprengine), WList(generateWorkList(Opts)),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
/// Returns true if there is still unprocessed work after the run.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
                            // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to being empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = ExprEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    ExprEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit.
  bool UnlimitedSteps = Steps == 0;
  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(Steps, PreReservationCap));

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit &WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  ExprEng.processEndWorklist();
  return WList->hasWork();
}

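// Dispatch a dequeued worklist unit to the handler matching the kind of its
// program point (block edge, block entrance, call enter/exit, etc.).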
void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
                                  const WorkListUnit &WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
      break;

    case ProgramPoint::BlockExitKind:
      assert(false && "BlockExit locations never occur in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind:
      HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
      break;

    case ProgramPoint::CallExitBeginKind:
      ExprEng.processCallExit(Pred);
      break;

    case ProgramPoint::EpsilonKind: {
      assert(Pred->hasSinglePred() &&
             "Assume epsilon has exactly one predecessor by construction");
      ExplodedNode *PNode = Pred->getFirstPred();
      dispatchWorkItem(Pred, PNode->getLocation(), WU);
      break;
    }
    default:
      assert(Loc.getAs<PostStmt>() ||
             Loc.getAs<PostInitializer>() ||
             Loc.getAs<PostImplicitCall>() ||
             Loc.getAs<CallExitEnd>() ||
             Loc.getAs<LoopExit>() ||
             Loc.getAs<PostAllocatorCall>());
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
      break;
  }
}

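// Run the worklist starting from the given initial state and collect the
// end-of-path nodes produced by the run into Dst.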
bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

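// Process a transition over a CFG block edge: mark the destination block as
// visited, handle the virtual-base and EXIT-block special cases, and
// otherwise let the ExprEngine process the block entrance.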
void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Display a prunable path note to the user if it's a virtual bases branch
  // and we're taking the path that skips virtual base constructors.
  if (L.getSrc()->getTerminator().isVirtualBaseBranch() &&
      L.getDst() == *L.getSrc()->succ_begin()) {
    ProgramPoint P = L.withTag(getNoteTags().makeNoteTag(
        [](BugReporterContext &, PathSensitiveBugReport &) -> std::string {
          // TODO: Just call out the name of the most derived class
          // when we know it.
          return "Virtual base initialization skipped because "
                 "it has already been handled by the most derived class";
        }, /*IsPrunable=*/true));
    // Perform the transition.
    ExplodedNodeSet Dst;
    NodeBuilder Bldr(Pred, Dst, BuilderCtx);
    Pred = Bldr.generateNode(P, Pred->getState(), Pred);
    if (!Pred)
      return;
  }

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement, if any.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      CFGElement LastElement = L.getSrc()->back();
      if (Optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      } else if (Optional<CFGAutomaticObjDtor> AutoDtor =
                     LastElement.getAs<CFGAutomaticObjDtor>()) {
        RS = dyn_cast<ReturnStmt>(AutoDtor->getTriggerStmt());
      }
    }

    // Process the final state transition.
    ExprEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the ExprEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  ExprEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

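// Process the entrance into a CFG block: bump the per-stack-frame visit count
// for the block, then process its first element, or its exit if it is empty.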
void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
                                           BlockId);
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (Optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    ExprEng.processCFGElement(*E, Pred, 0, &Ctx);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

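// Process the terminator of a CFG block, dispatching branches, switches,
// gotos, etc. to the appropriate handler; a block without a terminator simply
// flows into its single successor.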
void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminatorStmt()) {
    switch (Term->getStmtClass()) {
      default:
        llvm_unreachable("Analysis for this terminator not implemented.");

      case Stmt::CXXBindTemporaryExprClass:
        HandleCleanupTemporaryBranch(
            cast<CXXBindTemporaryExpr>(Term), B, Pred);
        return;

      // Model static initializers.
      case Stmt::DeclStmtClass:
        HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
        return;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

      // FIXME: Use constant-folding in CFG construction to simplify this
      // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXTryStmtClass:
        // Generate a node for each of the successors.
        // Our logic for EH analysis can certainly be improved.
        for (CFGBlock::const_succ_iterator it = B->succ_begin(),
             et = B->succ_end(); it != et; ++it) {
          if (const CFGBlock *succ = *it) {
            generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                         Pred->State, Pred);
          }
        }
        return;

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXForRangeStmtClass:
        HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert(B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        ExprEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass:
        // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), ExprEngine will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements. We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
        HandleBranch(Term, Term, B, Pred);
        return;

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);

        ExprEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::GCCAsmStmtClass:
        assert(cast<GCCAsmStmt>(Term)->isAsmGoto() &&
               "Encountered GCCAsmStmt without labels");
        // TODO: Handle jumping to labels.
        return;
    }
  }

  if (B->getTerminator().isVirtualBaseBranch()) {
    HandleVirtualBaseBranch(B, Pred);
    return;
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  ExprEng.processCallEnter(BuilderCtx, CE, Pred);
}

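// Handle a two-way branch terminator by letting the ExprEngine evaluate the
// branch condition against both successor blocks.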
void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processBranch(Cond, Ctx, Pred, Dst, *(B->succ_begin()),
                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

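// Handle the implicit two-way branch that decides whether a temporary
// object's destructor needs to run on this path.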
void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

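// Handle the two-way branch that guards a static local variable's
// initializer: one successor performs the initialization, the other skips it.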
void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                   *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

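// Continue within a block: process the CFG element at StmtIdx, or handle the
// block exit once every element in the block has been visited.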
void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    ExprEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

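// Decide which successor of a virtual-base branch to follow: when this
// constructor was invoked to construct a base subobject, virtual base
// initialization is skipped (the most derived class handles it); otherwise it
// is performed.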
void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
                                         ExplodedNode *Pred) {
  const LocationContext *LCtx = Pred->getLocationContext();
  if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
          LCtx->getStackFrame()->getCallSite())) {
    switch (CallerCtor->getConstructionKind()) {
      case CXXConstructExpr::CK_NonVirtualBase:
      case CXXConstructExpr::CK_VirtualBase: {
        BlockEdge Loc(B, *B->succ_begin(), LCtx);
        HandleBlockEdge(Loc, Pred);
        return;
      }
      default:
        break;
    }
  }

  // We either don't see a parent stack frame because we're in the top frame,
  // or the parent stack frame doesn't initialize our virtual bases.
  BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
  HandleBlockEdge(Loc, Pred);
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

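// Enqueue a node produced while processing a CFG element. Program points that
// do not need an extra node are enqueued directly; normal statements get a
// fresh PostStmt node, and the element index is advanced where appropriate.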
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node; the caller is responsible for enqueueing it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

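// Enqueue nodes that reached the end of a function: inlined calls continue
// with a CallExitBegin node, while nodes in the top frame end their path.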
void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set,
                                      const ReturnStmt *RS) {
  for (auto I : Set) {
    // If we are in an inlined call, generate a CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should run remove dead bindings here.
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

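// Create (or look up) the node for the given program point and state, link it
// to FromN, and update the builder's frontier; returns null if the node
// already existed in the graph.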
ExplodedNode* NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

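// Create a successor node for one potential target of an indirect goto and
// enqueue it unless it is marked as a sink.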
ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

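// Create and enqueue a successor node for a single 'case' branch of a switch.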
ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

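// Create a successor node for the 'default' branch of a switch; the default
// destination is expected to be the last successor of the switch block.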
ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}