• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 //=-- ExplodedGraph.cpp - Local, Path-Sens. "Exploded Graph" -*- C++ -*------=//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 //  This file defines the template classes ExplodedNode and ExplodedGraph,
11 //  which represent a path-sensitive, intra-procedural "exploded graph."
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
16 #include "clang/AST/ParentMap.h"
17 #include "clang/AST/Stmt.h"
18 #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
19 #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
20 #include "llvm/ADT/DenseMap.h"
21 #include "llvm/ADT/DenseSet.h"
22 #include "llvm/ADT/SmallVector.h"
23 #include "llvm/ADT/Statistic.h"
24 #include <vector>
25 
26 using namespace clang;
27 using namespace ento;
28 
29 //===----------------------------------------------------------------------===//
30 // Node auditing.
31 //===----------------------------------------------------------------------===//
32 
// An out of line virtual method to provide a home for the class vtable, so
// the vtable is emitted in this translation unit only.
ExplodedNode::Auditor::~Auditor() {}
35 
// Debug-only global auditor; notified (see addPredecessor) whenever an edge
// is added to the graph. Compiled out entirely in release (NDEBUG) builds.
#ifndef NDEBUG
static ExplodedNode::Auditor* NodeAuditor = nullptr;
#endif

// Installs the global node auditor. In release builds this is a no-op and
// the argument is ignored.
void ExplodedNode::SetAuditor(ExplodedNode::Auditor* A) {
#ifndef NDEBUG
  NodeAuditor = A;
#endif
}
45 
46 //===----------------------------------------------------------------------===//
47 // Cleanup.
48 //===----------------------------------------------------------------------===//
49 
// Construct an empty graph. A ReclaimNodeInterval of 0 means node
// reclamation is disabled until a non-zero interval is configured.
ExplodedGraph::ExplodedGraph()
  : NumNodes(0), ReclaimNodeInterval(0) {}

// Node storage lives in the graph's BumpPtrAllocator, so no per-node
// cleanup is needed here.
ExplodedGraph::~ExplodedGraph() {}
54 
55 //===----------------------------------------------------------------------===//
56 // Node reclamation.
57 //===----------------------------------------------------------------------===//
58 
isInterestingLValueExpr(const Expr * Ex)59 bool ExplodedGraph::isInterestingLValueExpr(const Expr *Ex) {
60   if (!Ex->isLValue())
61     return false;
62   return isa<DeclRefExpr>(Ex) ||
63          isa<MemberExpr>(Ex) ||
64          isa<ObjCIvarRefExpr>(Ex);
65 }
66 
// Decides whether 'node' is a non-essential "filler" node that can be
// spliced out of the graph and recycled. Pure predicate: does not modify
// the graph.
bool ExplodedGraph::shouldCollect(const ExplodedNode *node) {
  // First, we only consider nodes for reclamation if the following
  // conditions apply:
  //
  // (1) 1 predecessor (that has one successor)
  // (2) 1 successor (that has one predecessor)
  //
  // If a node has no successor it is on the "frontier", while a node
  // with no predecessor is a root.
  //
  // After these prerequisites, we discard all "filler" nodes that
  // are used only for intermediate processing, and are not essential
  // for analyzer history:
  //
  // (a) PreStmtPurgeDeadSymbols
  //
  // We then discard all other nodes where *all* of the following conditions
  // apply:
  //
  // (3) The ProgramPoint is for a PostStmt, but not a PostStore.
  // (4) There is no 'tag' for the ProgramPoint.
  // (5) The 'store' is the same as the predecessor.
  // (6) The 'GDM' is the same as the predecessor.
  // (7) The LocationContext is the same as the predecessor.
  // (8) Expressions that are *not* lvalue expressions.
  // (9) The PostStmt isn't for a non-consumed Stmt or Expr.
  // (10) The successor is neither a CallExpr StmtPoint nor a CallEnter or
  //      PreImplicitCall (so that we would be able to find it when retrying a
  //      call with no inlining).
  // FIXME: It may be safe to reclaim PreCall and PostCall nodes as well.

  // Conditions 1 and 2.
  if (node->pred_size() != 1 || node->succ_size() != 1)
    return false;

  const ExplodedNode *pred = *(node->pred_begin());
  if (pred->succ_size() != 1)
    return false;

  const ExplodedNode *succ = *(node->succ_begin());
  if (succ->pred_size() != 1)
    return false;

  // Now reclaim any nodes that are (by definition) not essential to
  // analysis history and are not consulted by any client code.
  // Case (a): untagged PreStmtPurgeDeadSymbols nodes are always fillers.
  ProgramPoint progPoint = node->getLocation();
  if (progPoint.getAs<PreStmtPurgeDeadSymbols>())
    return !progPoint.getTag();

  // Condition 3.
  if (!progPoint.getAs<PostStmt>() || progPoint.getAs<PostStore>())
    return false;

  // Condition 4.
  if (progPoint.getTag())
    return false;

  // Conditions 5, 6, and 7.
  ProgramStateRef state = node->getState();
  ProgramStateRef pred_state = pred->getState();
  if (state->store != pred_state->store || state->GDM != pred_state->GDM ||
      progPoint.getLocationContext() != pred->getLocationContext())
    return false;

  // All further checks require expressions. As per #3, we know that we have
  // a PostStmt.
  const Expr *Ex = dyn_cast<Expr>(progPoint.castAs<PostStmt>().getStmt());
  if (!Ex)
    return false;

  // Condition 8.
  // Do not collect nodes for "interesting" lvalue expressions since they are
  // used extensively for generating path diagnostics.
  if (isInterestingLValueExpr(Ex))
    return false;

  // Condition 9.
  // Do not collect nodes for non-consumed Stmt or Expr to ensure precise
  // diagnostic generation; specifically, so that we could anchor arrows
  // pointing to the beginning of statements (as written in code).
  ParentMap &PM = progPoint.getLocationContext()->getParentMap();
  if (!PM.isConsumedExpr(Ex))
    return false;

  // Condition 10.
  const ProgramPoint SuccLoc = succ->getLocation();
  if (Optional<StmtPoint> SP = SuccLoc.getAs<StmtPoint>())
    if (CallEvent::isCallStmt(SP->getStmt()))
      return false;

  // Condition 10, continuation.
  if (SuccLoc.getAs<CallEnter>() || SuccLoc.getAs<PreImplicitCall>())
    return false;

  return true;
}
163 
collectNode(ExplodedNode * node)164 void ExplodedGraph::collectNode(ExplodedNode *node) {
165   // Removing a node means:
166   // (a) changing the predecessors successor to the successor of this node
167   // (b) changing the successors predecessor to the predecessor of this node
168   // (c) Putting 'node' onto freeNodes.
169   assert(node->pred_size() == 1 || node->succ_size() == 1);
170   ExplodedNode *pred = *(node->pred_begin());
171   ExplodedNode *succ = *(node->succ_begin());
172   pred->replaceSuccessor(succ);
173   succ->replacePredecessor(pred);
174   FreeNodes.push_back(node);
175   Nodes.RemoveNode(node);
176   --NumNodes;
177   node->~ExplodedNode();
178 }
179 
reclaimRecentlyAllocatedNodes()180 void ExplodedGraph::reclaimRecentlyAllocatedNodes() {
181   if (ChangedNodes.empty())
182     return;
183 
184   // Only periodically reclaim nodes so that we can build up a set of
185   // nodes that meet the reclamation criteria.  Freshly created nodes
186   // by definition have no successor, and thus cannot be reclaimed (see below).
187   assert(ReclaimCounter > 0);
188   if (--ReclaimCounter != 0)
189     return;
190   ReclaimCounter = ReclaimNodeInterval;
191 
192   for (NodeVector::iterator it = ChangedNodes.begin(), et = ChangedNodes.end();
193        it != et; ++it) {
194     ExplodedNode *node = *it;
195     if (shouldCollect(node))
196       collectNode(node);
197   }
198   ChangedNodes.clear();
199 }
200 
201 //===----------------------------------------------------------------------===//
202 // ExplodedNode.
203 //===----------------------------------------------------------------------===//
204 
// A NodeGroup's storage type is actually very much like a TinyPtrVector:
// it can be either a pointer to a single ExplodedNode, or a pointer to a
// BumpVector allocated with the ExplodedGraph's allocator. This allows the
// common case of single-node NodeGroups to be implemented with no extra memory.
//
// Consequently, each of the NodeGroup methods has up to four cases to handle:
// 1. The flag is set and this group does not actually contain any nodes.
// 2. The group is empty, in which case the storage value is null.
// 3. The group contains a single node.
// 4. The group contains more than one node.
typedef BumpVector<ExplodedNode *> ExplodedNodeVector;
typedef llvm::PointerUnion<ExplodedNode *, ExplodedNodeVector *> GroupStorage;
217 
// Links 'V' as a predecessor of this node and, symmetrically, records this
// node as a successor of 'V'. Sink nodes may never gain successors.
void ExplodedNode::addPredecessor(ExplodedNode *V, ExplodedGraph &G) {
  assert (!V->isSink());
  Preds.addNode(V, G);
  V->Succs.addNode(this, G);
#ifndef NDEBUG
  // Notify the debug-only auditor (if one was installed via SetAuditor).
  if (NodeAuditor) NodeAuditor->AddEdge(V, this);
#endif
}
226 
// Replaces the group's contents with exactly 'node'. Only valid when the
// group is currently in the single-node representation (asserted below).
void ExplodedNode::NodeGroup::replaceNode(ExplodedNode *node) {
  assert(!getFlag());

  // Reinterpret the raw pointer-sized member 'P' as the tagged PointerUnion.
  GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
  assert(Storage.is<ExplodedNode *>());
  Storage = node;
  assert(Storage.is<ExplodedNode *>());
}
235 
// Appends 'N' to the group, transparently upgrading the storage from the
// empty or single-node representation to a BumpVector when needed. The
// vector is allocated from G's allocator, so it is freed with the graph.
void ExplodedNode::NodeGroup::addNode(ExplodedNode *N, ExplodedGraph &G) {
  assert(!getFlag());

  GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
  // Case: empty group -> store the node inline (single-node representation).
  if (Storage.isNull()) {
    Storage = N;
    assert(Storage.is<ExplodedNode *>());
    return;
  }

  ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>();

  if (!V) {
    // Switch from single-node to multi-node representation.
    ExplodedNode *Old = Storage.get<ExplodedNode *>();

    // Placement-new the vector into graph-owned memory, seeded with the
    // previously inline node. Initial capacity of 4 covers typical fan-out.
    BumpVectorContext &Ctx = G.getNodeAllocator();
    V = G.getAllocator().Allocate<ExplodedNodeVector>();
    new (V) ExplodedNodeVector(Ctx, 4);
    V->push_back(Old, Ctx);

    Storage = V;
    assert(!getFlag());
    assert(Storage.is<ExplodedNodeVector *>());
  }

  V->push_back(N, G.getNodeAllocator());
}
264 
size() const265 unsigned ExplodedNode::NodeGroup::size() const {
266   if (getFlag())
267     return 0;
268 
269   const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
270   if (Storage.isNull())
271     return 0;
272   if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
273     return V->size();
274   return 1;
275 }
276 
begin() const277 ExplodedNode * const *ExplodedNode::NodeGroup::begin() const {
278   if (getFlag())
279     return nullptr;
280 
281   const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
282   if (Storage.isNull())
283     return nullptr;
284   if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
285     return V->begin();
286   return Storage.getAddrOfPtr1();
287 }
288 
end() const289 ExplodedNode * const *ExplodedNode::NodeGroup::end() const {
290   if (getFlag())
291     return nullptr;
292 
293   const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
294   if (Storage.isNull())
295     return nullptr;
296   if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
297     return V->end();
298   return Storage.getAddrOfPtr1() + 1;
299 }
300 
// Returns the unique node for (L, State, IsSink), creating it if necessary.
// Recycled storage from FreeNodes is preferred over fresh allocation. If
// 'IsNew' is non-null, it is set to whether a node was created by this call.
ExplodedNode *ExplodedGraph::getNode(const ProgramPoint &L,
                                     ProgramStateRef State,
                                     bool IsSink,
                                     bool* IsNew) {
  // Profile 'State' to determine if we already have an existing node.
  llvm::FoldingSetNodeID profile;
  void *InsertPos = nullptr;

  NodeTy::Profile(profile, L, State, IsSink);
  NodeTy* V = Nodes.FindNodeOrInsertPos(profile, InsertPos);

  if (!V) {
    // Reuse storage from a previously collected node when available.
    if (!FreeNodes.empty()) {
      V = FreeNodes.back();
      FreeNodes.pop_back();
    }
    else {
      // Allocate a new node.
      V = (NodeTy*) getAllocator().Allocate<NodeTy>();
    }

    // Construct the node in place (the storage is raw in both branches).
    new (V) NodeTy(L, State, IsSink);

    // Track the node as a reclamation candidate only when reclamation is
    // enabled (interval != 0).
    if (ReclaimNodeInterval)
      ChangedNodes.push_back(V);

    // Insert the node into the node set and return it.
    Nodes.InsertNode(V, InsertPos);
    ++NumNodes;

    if (IsNew) *IsNew = true;
  }
  else
    if (IsNew) *IsNew = false;

  return V;
}
338 
// Builds a trimmed copy of this graph containing only the nodes that lie on
// some root-to-sink path for the given 'Sinks'. Returns nullptr when there
// is nothing to trim (no nodes, or no sink reaches a root). 'ForwardMap'
// (old node -> new node) and 'InverseMap' (new node -> old node) are
// populated when non-null.
ExplodedGraph *
ExplodedGraph::trim(ArrayRef<const NodeTy *> Sinks,
                    InterExplodedGraphMap *ForwardMap,
                    InterExplodedGraphMap *InverseMap) const{

  if (Nodes.empty())
    return nullptr;

  // Pass 1 result: the set of old nodes reachable backwards from a sink.
  typedef llvm::DenseSet<const ExplodedNode*> Pass1Ty;
  Pass1Ty Pass1;

  // Pass 2 result: mapping from old nodes to their new-graph counterparts.
  // Use the caller's ForwardMap as the storage when one was provided.
  typedef InterExplodedGraphMap Pass2Ty;
  InterExplodedGraphMap Pass2Scratch;
  Pass2Ty &Pass2 = ForwardMap ? *ForwardMap : Pass2Scratch;

  SmallVector<const ExplodedNode*, 10> WL1, WL2;

  // ===- Pass 1 (reverse DFS) -===
  // Seed the worklist with the (non-null) sinks.
  for (ArrayRef<const NodeTy *>::iterator I = Sinks.begin(), E = Sinks.end();
       I != E; ++I) {
    if (*I)
      WL1.push_back(*I);
  }

  // Process the first worklist until it is empty.
  while (!WL1.empty()) {
    const ExplodedNode *N = WL1.pop_back_val();

    // Have we already visited this node?  If so, continue to the next one.
    if (Pass1.count(N))
      continue;

    // Otherwise, mark this node as visited.
    Pass1.insert(N);

    // If this is a root enqueue it to the second worklist.
    if (N->Preds.empty()) {
      WL2.push_back(N);
      continue;
    }

    // Visit our predecessors and enqueue them.
    for (ExplodedNode::pred_iterator I = N->Preds.begin(), E = N->Preds.end();
         I != E; ++I)
      WL1.push_back(*I);
  }

  // We didn't hit a root? Return with a null pointer for the new graph.
  if (WL2.empty())
    return nullptr;

  // Create an empty graph.
  ExplodedGraph* G = MakeEmptyGraph();

  // ===- Pass 2 (forward DFS to construct the new graph) -===
  // Walk forward from the roots found in pass 1, cloning only nodes that
  // pass 1 marked (i.e. that can reach a sink).
  while (!WL2.empty()) {
    const ExplodedNode *N = WL2.pop_back_val();

    // Skip this node if we have already processed it.
    if (Pass2.find(N) != Pass2.end())
      continue;

    // Create the corresponding node in the new graph and record the mapping
    // from the old node to the new node.
    ExplodedNode *NewN = G->getNode(N->getLocation(), N->State, N->isSink(),
                                    nullptr);
    Pass2[N] = NewN;

    // Also record the reverse mapping from the new node to the old node.
    if (InverseMap) (*InverseMap)[NewN] = N;

    // If this node is a root, designate it as such in the graph.
    if (N->Preds.empty())
      G->addRoot(NewN);

    // In the case that some of the intended predecessors of NewN have already
    // been created, we should hook them up as predecessors.

    // Walk through the predecessors of 'N' and hook up their corresponding
    // nodes in the new graph (if any) to the freshly created node.
    for (ExplodedNode::pred_iterator I = N->Preds.begin(), E = N->Preds.end();
         I != E; ++I) {
      Pass2Ty::iterator PI = Pass2.find(*I);
      if (PI == Pass2.end())
        continue;

      NewN->addPredecessor(const_cast<ExplodedNode *>(PI->second), *G);
    }

    // In the case that some of the intended successors of NewN have already
    // been created, we should hook them up as successors.  Otherwise, enqueue
    // the new nodes from the original graph that should have nodes created
    // in the new graph.
    for (ExplodedNode::succ_iterator I = N->Succs.begin(), E = N->Succs.end();
         I != E; ++I) {
      Pass2Ty::iterator PI = Pass2.find(*I);
      if (PI != Pass2.end()) {
        const_cast<ExplodedNode *>(PI->second)->addPredecessor(NewN, *G);
        continue;
      }

      // Enqueue nodes to the worklist that were marked during pass 1.
      if (Pass1.count(*I))
        WL2.push_back(*I);
    }
  }

  return G;
}
448 
449