//=-- ExplodedGraph.cpp - Local, Path-Sens. "Exploded Graph" -*- C++ -*------=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines the template classes ExplodedNode and ExplodedGraph,
//  which represent a path-sensitive, intra-procedural "exploded graph."
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/Stmt.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include <vector>

using namespace clang;
using namespace ento;

//===----------------------------------------------------------------------===//
// Node auditing.
//===----------------------------------------------------------------------===//

// An out of line virtual method to provide a home for the class vtable.
ExplodedNode::Auditor::~Auditor() {}

#ifndef NDEBUG
static ExplodedNode::Auditor* NodeAuditor = nullptr;
#endif

void ExplodedNode::SetAuditor(ExplodedNode::Auditor* A) {
#ifndef NDEBUG
  NodeAuditor = A;
#endif
}
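
// Illustrative sketch (added; not part of the original file): a hypothetical
// Auditor subclass that logs every edge added to the graph. This assumes
// Auditor declares a virtual AddEdge(ExplodedNode *, ExplodedNode *) hook, as
// its use in addPredecessor() below suggests.
//
//   class LoggingAuditor : public ExplodedNode::Auditor {
//     void AddEdge(ExplodedNode *Src, ExplodedNode *Dst) override {
//       llvm::errs() << "edge: " << Src << " -> " << Dst << "\n";
//     }
//   };
//
//   // Installed once during setup (only debug builds honor it):
//   //   static LoggingAuditor TheAuditor;
//   //   ExplodedNode::SetAuditor(&TheAuditor);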

//===----------------------------------------------------------------------===//
// Cleanup.
//===----------------------------------------------------------------------===//

ExplodedGraph::ExplodedGraph()
  : NumNodes(0), ReclaimNodeInterval(0) {}

ExplodedGraph::~ExplodedGraph() {}

//===----------------------------------------------------------------------===//
// Node reclamation.
//===----------------------------------------------------------------------===//

bool ExplodedGraph::isInterestingLValueExpr(const Expr *Ex) {
  if (!Ex->isLValue())
    return false;
  return isa<DeclRefExpr>(Ex) ||
         isa<MemberExpr>(Ex) ||
         isa<ObjCIvarRefExpr>(Ex);
}

bool ExplodedGraph::shouldCollect(const ExplodedNode *node) {
  // First, we only consider nodes for reclamation if the following
  // conditions apply:
  //
  // (1) 1 predecessor (that has one successor)
  // (2) 1 successor (that has one predecessor)
  //
  // If a node has no successor it is on the "frontier", while a node
  // with no predecessor is a root.
  //
  // After these prerequisites, we discard all "filler" nodes that
  // are used only for intermediate processing, and are not essential
  // for analyzer history:
  //
  // (a) PreStmtPurgeDeadSymbols
  //
  // We then discard all other nodes where *all* of the following conditions
  // apply:
  //
  // (3) The ProgramPoint is for a PostStmt, but not a PostStore.
  // (4) There is no 'tag' for the ProgramPoint.
  // (5) The 'store' is the same as the predecessor.
  // (6) The 'GDM' is the same as the predecessor.
  // (7) The LocationContext is the same as the predecessor.
  // (8) Expressions that are *not* lvalue expressions.
  // (9) The PostStmt isn't for a non-consumed Stmt or Expr.
  // (10) The successor is neither a CallExpr StmtPoint nor a CallEnter or
  //      PreImplicitCall (so that we would be able to find it when retrying a
  //      call with no inlining).
  // FIXME: It may be safe to reclaim PreCall and PostCall nodes as well.
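  //
  // Illustration (added; not part of the original comment): conditions (1)
  // and (2) mean reclamation only ever applies to a node in a simple chain,
  //
  //     pred -> node -> succ
  //
  // where 'node' has exactly one predecessor and one successor, 'pred' has
  // exactly one successor, and 'succ' has exactly one predecessor.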

  // Conditions 1 and 2.
  if (node->pred_size() != 1 || node->succ_size() != 1)
    return false;

  const ExplodedNode *pred = *(node->pred_begin());
  if (pred->succ_size() != 1)
    return false;

  const ExplodedNode *succ = *(node->succ_begin());
  if (succ->pred_size() != 1)
    return false;

  // Now reclaim any nodes that are (by definition) not essential to
  // analysis history and are not consulted by any client code.
  ProgramPoint progPoint = node->getLocation();
  if (progPoint.getAs<PreStmtPurgeDeadSymbols>())
    return !progPoint.getTag();

  // Condition 3.
  if (!progPoint.getAs<PostStmt>() || progPoint.getAs<PostStore>())
    return false;

  // Condition 4.
  if (progPoint.getTag())
    return false;

  // Conditions 5, 6, and 7.
  ProgramStateRef state = node->getState();
  ProgramStateRef pred_state = pred->getState();
  if (state->store != pred_state->store || state->GDM != pred_state->GDM ||
      progPoint.getLocationContext() != pred->getLocationContext())
    return false;

  // All further checks require expressions. As per #3, we know that we have
  // a PostStmt.
  const Expr *Ex = dyn_cast<Expr>(progPoint.castAs<PostStmt>().getStmt());
  if (!Ex)
    return false;

  // Condition 8.
  // Do not collect nodes for "interesting" lvalue expressions since they are
  // used extensively for generating path diagnostics.
  if (isInterestingLValueExpr(Ex))
    return false;

  // Condition 9.
  // Do not collect nodes for non-consumed Stmt or Expr to ensure precise
  // diagnostic generation; specifically, so that we could anchor arrows
  // pointing to the beginning of statements (as written in code).
  ParentMap &PM = progPoint.getLocationContext()->getParentMap();
  if (!PM.isConsumedExpr(Ex))
    return false;

  // Condition 10.
  const ProgramPoint SuccLoc = succ->getLocation();
  if (Optional<StmtPoint> SP = SuccLoc.getAs<StmtPoint>())
    if (CallEvent::isCallStmt(SP->getStmt()))
      return false;

  // Condition 10, continuation.
  if (SuccLoc.getAs<CallEnter>() || SuccLoc.getAs<PreImplicitCall>())
    return false;

  return true;
}

void ExplodedGraph::collectNode(ExplodedNode *node) {
  // Removing a node means:
  // (a) changing the predecessor's successor to the successor of this node,
  // (b) changing the successor's predecessor to the predecessor of this node,
  // (c) putting 'node' onto FreeNodes.
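  //
  // In other words (added illustration), the chain
  //     pred -> node -> succ
  // is spliced to
  //     pred -> succ
  // and 'node' is parked on FreeNodes so getNode() can reuse its storage.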
  assert(node->pred_size() == 1 || node->succ_size() == 1);
  ExplodedNode *pred = *(node->pred_begin());
  ExplodedNode *succ = *(node->succ_begin());
  pred->replaceSuccessor(succ);
  succ->replacePredecessor(pred);
  FreeNodes.push_back(node);
  Nodes.RemoveNode(node);
  --NumNodes;
  node->~ExplodedNode();
}

void ExplodedGraph::reclaimRecentlyAllocatedNodes() {
  if (ChangedNodes.empty())
    return;

  // Only periodically reclaim nodes so that we can build up a set of
  // nodes that meet the reclamation criteria.  Freshly created nodes
  // by definition have no successor, and thus cannot be reclaimed (see below).
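  //
  // Worked example (added): if ReclaimNodeInterval is 10, the sweep below
  // runs once per ten calls that reach this point; the other nine calls just
  // decrement the counter and return.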
  assert(ReclaimCounter > 0);
  if (--ReclaimCounter != 0)
    return;
  ReclaimCounter = ReclaimNodeInterval;

  for (NodeVector::iterator it = ChangedNodes.begin(), et = ChangedNodes.end();
       it != et; ++it) {
    ExplodedNode *node = *it;
    if (shouldCollect(node))
      collectNode(node);
  }
  ChangedNodes.clear();
}

//===----------------------------------------------------------------------===//
// ExplodedNode.
//===----------------------------------------------------------------------===//

// A NodeGroup's storage type is actually very much like a TinyPtrVector:
// it can be either a pointer to a single ExplodedNode, or a pointer to a
// BumpVector allocated with the ExplodedGraph's allocator. This allows the
// common case of single-node NodeGroups to be implemented with no extra memory.
//
// Consequently, each of the NodeGroup methods has up to four cases to handle:
// 1. The flag is set and this group does not actually contain any nodes.
// 2. The group is empty, in which case the storage value is null.
// 3. The group contains a single node.
// 4. The group contains more than one node.
typedef BumpVector<ExplodedNode *> ExplodedNodeVector;
typedef llvm::PointerUnion<ExplodedNode *, ExplodedNodeVector *> GroupStorage;
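
// Illustrative sketch (added; not from the original file): how the methods
// below discriminate the GroupStorage cases. Case 1 (the flag) is handled
// separately via getFlag() before the storage is inspected.
//
//   GroupStorage Storage = ...;
//   if (Storage.isNull()) {
//     // Case 2: empty group.
//   } else if (ExplodedNodeVector *V =
//                  Storage.dyn_cast<ExplodedNodeVector *>()) {
//     // Case 4: several nodes, held in a BumpVector.
//   } else {
//     ExplodedNode *N = Storage.get<ExplodedNode *>();  // Case 3: one node.
//   }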

void ExplodedNode::addPredecessor(ExplodedNode *V, ExplodedGraph &G) {
  assert (!V->isSink());
  Preds.addNode(V, G);
  V->Succs.addNode(this, G);
#ifndef NDEBUG
  if (NodeAuditor) NodeAuditor->AddEdge(V, this);
#endif
}

void ExplodedNode::NodeGroup::replaceNode(ExplodedNode *node) {
  assert(!getFlag());

  GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
  assert(Storage.is<ExplodedNode *>());
  Storage = node;
  assert(Storage.is<ExplodedNode *>());
}

void ExplodedNode::NodeGroup::addNode(ExplodedNode *N, ExplodedGraph &G) {
  assert(!getFlag());

  GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
  if (Storage.isNull()) {
    Storage = N;
    assert(Storage.is<ExplodedNode *>());
    return;
  }

  ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>();

  if (!V) {
    // Switch from single-node to multi-node representation.
    ExplodedNode *Old = Storage.get<ExplodedNode *>();

    BumpVectorContext &Ctx = G.getNodeAllocator();
    V = G.getAllocator().Allocate<ExplodedNodeVector>();
    new (V) ExplodedNodeVector(Ctx, 4);
    V->push_back(Old, Ctx);

    Storage = V;
    assert(!getFlag());
    assert(Storage.is<ExplodedNodeVector *>());
  }

  V->push_back(N, G.getNodeAllocator());
}

unsigned ExplodedNode::NodeGroup::size() const {
  if (getFlag())
    return 0;

  const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
  if (Storage.isNull())
    return 0;
  if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
    return V->size();
  return 1;
}

ExplodedNode * const *ExplodedNode::NodeGroup::begin() const {
  if (getFlag())
    return nullptr;

  const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
  if (Storage.isNull())
    return nullptr;
  if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
    return V->begin();
  return Storage.getAddrOfPtr1();
}

ExplodedNode * const *ExplodedNode::NodeGroup::end() const {
  if (getFlag())
    return nullptr;

  const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
  if (Storage.isNull())
    return nullptr;
  if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
    return V->end();
  return Storage.getAddrOfPtr1() + 1;
}

ExplodedNode *ExplodedGraph::getNode(const ProgramPoint &L,
                                     ProgramStateRef State,
                                     bool IsSink,
                                     bool* IsNew) {
  // Profile 'State' to determine if we already have an existing node.
  llvm::FoldingSetNodeID profile;
  void *InsertPos = nullptr;

  NodeTy::Profile(profile, L, State, IsSink);
  NodeTy* V = Nodes.FindNodeOrInsertPos(profile, InsertPos);

  if (!V) {
    if (!FreeNodes.empty()) {
      V = FreeNodes.back();
      FreeNodes.pop_back();
    }
    else {
      // Allocate a new node.
      V = (NodeTy*) getAllocator().Allocate<NodeTy>();
    }

    new (V) NodeTy(L, State, IsSink);

    if (ReclaimNodeInterval)
      ChangedNodes.push_back(V);

    // Insert the node into the node set and return it.
    Nodes.InsertNode(V, InsertPos);
    ++NumNodes;

    if (IsNew) *IsNew = true;
  }
  else
    if (IsNew) *IsNew = false;

  return V;
}
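
// Illustrative usage sketch (added; a hypothetical caller, not part of this
// file):
//
//   bool IsNew = false;
//   ExplodedNode *N = G.getNode(Loc, State, /*IsSink=*/false, &IsNew);
//   if (IsNew) {
//     // First node with this (Loc, State, IsSink) profile; a caller would
//     // typically schedule it for further exploration.
//   } else {
//     // A previously created node was returned, i.e. the paths merge here.
//   }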

ExplodedNode *ExplodedGraph::createUncachedNode(const ProgramPoint &L,
                                                ProgramStateRef State,
                                                bool IsSink) {
  NodeTy *V = (NodeTy *) getAllocator().Allocate<NodeTy>();
  new (V) NodeTy(L, State, IsSink);
  return V;
}

std::unique_ptr<ExplodedGraph>
ExplodedGraph::trim(ArrayRef<const NodeTy *> Sinks,
                    InterExplodedGraphMap *ForwardMap,
                    InterExplodedGraphMap *InverseMap) const {

  if (Nodes.empty())
    return nullptr;

  typedef llvm::DenseSet<const ExplodedNode*> Pass1Ty;
  Pass1Ty Pass1;

  typedef InterExplodedGraphMap Pass2Ty;
  InterExplodedGraphMap Pass2Scratch;
  Pass2Ty &Pass2 = ForwardMap ? *ForwardMap : Pass2Scratch;

  SmallVector<const ExplodedNode*, 10> WL1, WL2;

  // ===- Pass 1 (reverse DFS) -===
  for (ArrayRef<const NodeTy *>::iterator I = Sinks.begin(), E = Sinks.end();
       I != E; ++I) {
    if (*I)
      WL1.push_back(*I);
  }

  // Process the first worklist until it is empty.
  while (!WL1.empty()) {
    const ExplodedNode *N = WL1.pop_back_val();

    // Have we already visited this node?  If so, continue to the next one.
    if (!Pass1.insert(N).second)
      continue;

    // If this is a root, enqueue it to the second worklist.
    if (N->Preds.empty()) {
      WL2.push_back(N);
      continue;
    }

    // Visit our predecessors and enqueue them.
    WL1.append(N->Preds.begin(), N->Preds.end());
  }

  // We didn't hit a root? Return with a null pointer for the new graph.
  if (WL2.empty())
    return nullptr;

  // Create an empty graph.
  std::unique_ptr<ExplodedGraph> G = MakeEmptyGraph();

  // ===- Pass 2 (forward DFS to construct the new graph) -===
  while (!WL2.empty()) {
    const ExplodedNode *N = WL2.pop_back_val();

    // Skip this node if we have already processed it.
    if (Pass2.find(N) != Pass2.end())
      continue;

    // Create the corresponding node in the new graph and record the mapping
    // from the old node to the new node.
    ExplodedNode *NewN = G->createUncachedNode(N->getLocation(), N->State, N->isSink());
    Pass2[N] = NewN;

    // Also record the reverse mapping from the new node to the old node.
    if (InverseMap) (*InverseMap)[NewN] = N;

    // If this node is a root, designate it as such in the graph.
    if (N->Preds.empty())
      G->addRoot(NewN);

    // In the case that some of the intended predecessors of NewN have already
    // been created, we should hook them up as predecessors.

    // Walk through the predecessors of 'N' and hook up their corresponding
    // nodes in the new graph (if any) to the freshly created node.
    for (ExplodedNode::pred_iterator I = N->Preds.begin(), E = N->Preds.end();
         I != E; ++I) {
      Pass2Ty::iterator PI = Pass2.find(*I);
      if (PI == Pass2.end())
        continue;

      NewN->addPredecessor(const_cast<ExplodedNode *>(PI->second), *G);
    }

    // In the case that some of the intended successors of NewN have already
    // been created, we should hook them up as successors.  Otherwise, enqueue
    // the new nodes from the original graph that should have nodes created
    // in the new graph.
    for (ExplodedNode::succ_iterator I = N->Succs.begin(), E = N->Succs.end();
         I != E; ++I) {
      Pass2Ty::iterator PI = Pass2.find(*I);
      if (PI != Pass2.end()) {
        const_cast<ExplodedNode *>(PI->second)->addPredecessor(NewN, *G);
        continue;
      }

      // Enqueue nodes to the worklist that were marked during pass 1.
      if (Pass1.count(*I))
        WL2.push_back(*I);
    }
  }

  return G;
}
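
// Illustrative usage sketch (added; a hypothetical caller, not part of this
// file): trimming the graph down to the nodes from which a single error node
// is reachable, while keeping a map from the copies back to the originals.
//
//   const ExplodedNode *ErrorNode = ...;   // some sink of interest
//   InterExplodedGraphMap ForwardMap, InverseMap;
//   std::unique_ptr<ExplodedGraph> Trimmed =
//       G.trim(ErrorNode, &ForwardMap, &InverseMap);
//   // If non-null, 'Trimmed' holds copies of the nodes from which ErrorNode
//   // is reachable; InverseMap maps each copy back to its original node.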
450 
451