xref: /llvm-project/clang/lib/StaticAnalyzer/Core/ExplodedGraph.cpp (revision 6b85f8e99b38aed5bb4eeb4ca8d7ccdd5a1bf48c)
1 //===- ExplodedGraph.cpp - Local, Path-Sens. "Exploded Graph" -------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 //  This file defines the template classes ExplodedNode and ExplodedGraph,
10 //  which represent a path-sensitive, intra-procedural "exploded graph."
11 //
12 //===----------------------------------------------------------------------===//
13 
14 #include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
15 #include "clang/AST/Expr.h"
16 #include "clang/AST/ExprObjC.h"
17 #include "clang/AST/ParentMap.h"
18 #include "clang/AST/Stmt.h"
19 #include "clang/Analysis/CFGStmtMap.h"
20 #include "clang/Analysis/ProgramPoint.h"
21 #include "clang/Analysis/Support/BumpVector.h"
22 #include "clang/Basic/LLVM.h"
23 #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
24 #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
25 #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState_Fwd.h"
26 #include "llvm/ADT/DenseSet.h"
27 #include "llvm/ADT/FoldingSet.h"
28 #include "llvm/ADT/Optional.h"
29 #include "llvm/ADT/PointerUnion.h"
30 #include "llvm/ADT/SmallVector.h"
31 #include "llvm/Support/Casting.h"
32 #include <cassert>
33 #include <memory>
34 
35 using namespace clang;
36 using namespace ento;
37 
38 //===----------------------------------------------------------------------===//
39 // Cleanup.
40 //===----------------------------------------------------------------------===//
41 
// All members are default-initialized; node storage lives in the
// BumpPtrAllocator owned by this graph.
ExplodedGraph::ExplodedGraph() = default;
43 
// Nodes are arena-allocated, so no per-node cleanup is required here.
ExplodedGraph::~ExplodedGraph() = default;
45 
46 //===----------------------------------------------------------------------===//
47 // Node reclamation.
48 //===----------------------------------------------------------------------===//
49 
50 bool ExplodedGraph::isInterestingLValueExpr(const Expr *Ex) {
51   if (!Ex->isLValue())
52     return false;
53   return isa<DeclRefExpr>(Ex) ||
54          isa<MemberExpr>(Ex) ||
55          isa<ObjCIvarRefExpr>(Ex);
56 }
57 
// Decide whether 'node' is a reclaimable "filler" node, i.e. one that can be
// removed from the graph without losing analyzer history consulted by
// diagnostics or by clients. The numbered conditions below are checked in
// order; each early return keeps the node alive.
bool ExplodedGraph::shouldCollect(const ExplodedNode *node) {
  // First, we only consider nodes for reclamation if the following
  // conditions apply:
  //
  // (1) 1 predecessor (that has one successor)
  // (2) 1 successor (that has one predecessor)
  //
  // If a node has no successor it is on the "frontier", while a node
  // with no predecessor is a root.
  //
  // After these prerequisites, we discard all "filler" nodes that
  // are used only for intermediate processing, and are not essential
  // for analyzer history:
  //
  // (a) PreStmtPurgeDeadSymbols
  //
  // We then discard all other nodes where *all* of the following conditions
  // apply:
  //
  // (3) The ProgramPoint is for a PostStmt, but not a PostStore.
  // (4) There is no 'tag' for the ProgramPoint.
  // (5) The 'store' is the same as the predecessor.
  // (6) The 'GDM' is the same as the predecessor.
  // (7) The LocationContext is the same as the predecessor.
  // (8) Expressions that are *not* lvalue expressions.
  // (9) The PostStmt isn't for a non-consumed Stmt or Expr.
  // (10) The successor is neither a CallExpr StmtPoint nor a CallEnter or
  //      PreImplicitCall (so that we would be able to find it when retrying a
  //      call with no inlining).
  // FIXME: It may be safe to reclaim PreCall and PostCall nodes as well.

  // Conditions 1 and 2.
  if (node->pred_size() != 1 || node->succ_size() != 1)
    return false;

  const ExplodedNode *pred = *(node->pred_begin());
  if (pred->succ_size() != 1)
    return false;

  const ExplodedNode *succ = *(node->succ_begin());
  if (succ->pred_size() != 1)
    return false;

  // Now reclaim any nodes that are (by definition) not essential to
  // analysis history and are not consulted by any client code.
  // Case (a): PreStmtPurgeDeadSymbols nodes are reclaimable unless tagged.
  ProgramPoint progPoint = node->getLocation();
  if (progPoint.getAs<PreStmtPurgeDeadSymbols>())
    return !progPoint.getTag();

  // Condition 3.
  if (!progPoint.getAs<PostStmt>() || progPoint.getAs<PostStore>())
    return false;

  // Condition 4.
  if (progPoint.getTag())
    return false;

  // Conditions 5, 6, and 7.
  ProgramStateRef state = node->getState();
  ProgramStateRef pred_state = pred->getState();
  if (state->store != pred_state->store || state->GDM != pred_state->GDM ||
      progPoint.getLocationContext() != pred->getLocationContext())
    return false;

  // All further checks require expressions. As per #3, we know that we have
  // a PostStmt.
  const Expr *Ex = dyn_cast<Expr>(progPoint.castAs<PostStmt>().getStmt());
  if (!Ex)
    return false;

  // Condition 8.
  // Do not collect nodes for "interesting" lvalue expressions since they are
  // used extensively for generating path diagnostics.
  if (isInterestingLValueExpr(Ex))
    return false;

  // Condition 9.
  // Do not collect nodes for non-consumed Stmt or Expr to ensure precise
  // diagnostic generation; specifically, so that we could anchor arrows
  // pointing to the beginning of statements (as written in code).
  const ParentMap &PM = progPoint.getLocationContext()->getParentMap();
  if (!PM.isConsumedExpr(Ex))
    return false;

  // Condition 10.
  const ProgramPoint SuccLoc = succ->getLocation();
  if (Optional<StmtPoint> SP = SuccLoc.getAs<StmtPoint>())
    if (CallEvent::isCallStmt(SP->getStmt()))
      return false;

  // Condition 10, continuation.
  if (SuccLoc.getAs<CallEnter>() || SuccLoc.getAs<PreImplicitCall>())
    return false;

  return true;
}
154 
155 void ExplodedGraph::collectNode(ExplodedNode *node) {
156   // Removing a node means:
157   // (a) changing the predecessors successor to the successor of this node
158   // (b) changing the successors predecessor to the predecessor of this node
159   // (c) Putting 'node' onto freeNodes.
160   assert(node->pred_size() == 1 || node->succ_size() == 1);
161   ExplodedNode *pred = *(node->pred_begin());
162   ExplodedNode *succ = *(node->succ_begin());
163   pred->replaceSuccessor(succ);
164   succ->replacePredecessor(pred);
165   FreeNodes.push_back(node);
166   Nodes.RemoveNode(node);
167   --NumNodes;
168   node->~ExplodedNode();
169 }
170 
171 void ExplodedGraph::reclaimRecentlyAllocatedNodes() {
172   if (ChangedNodes.empty())
173     return;
174 
175   // Only periodically reclaim nodes so that we can build up a set of
176   // nodes that meet the reclamation criteria.  Freshly created nodes
177   // by definition have no successor, and thus cannot be reclaimed (see below).
178   assert(ReclaimCounter > 0);
179   if (--ReclaimCounter != 0)
180     return;
181   ReclaimCounter = ReclaimNodeInterval;
182 
183   for (const auto node : ChangedNodes)
184     if (shouldCollect(node))
185       collectNode(node);
186   ChangedNodes.clear();
187 }
188 
189 //===----------------------------------------------------------------------===//
190 // ExplodedNode.
191 //===----------------------------------------------------------------------===//
192 
193 // An NodeGroup's storage type is actually very much like a TinyPtrVector:
194 // it can be either a pointer to a single ExplodedNode, or a pointer to a
195 // BumpVector allocated with the ExplodedGraph's allocator. This allows the
196 // common case of single-node NodeGroups to be implemented with no extra memory.
197 //
198 // Consequently, each of the NodeGroup methods have up to four cases to handle:
199 // 1. The flag is set and this group does not actually contain any nodes.
200 // 2. The group is empty, in which case the storage value is null.
201 // 3. The group contains a single node.
202 // 4. The group contains more than one node.
using ExplodedNodeVector = BumpVector<ExplodedNode *>;
// Tagged pointer: either one inline node or an out-of-line BumpVector.
using GroupStorage = llvm::PointerUnion<ExplodedNode *, ExplodedNodeVector *>;
205 
// Link V as a predecessor of this node and, symmetrically, this node as a
// successor of V. Sinks may not gain successors, hence the assertion.
void ExplodedNode::addPredecessor(ExplodedNode *V, ExplodedGraph &G) {
  assert(!V->isSink());
  Preds.addNode(V, G);
  V->Succs.addNode(this, G);
}
211 
// Replace the group's sole node with 'node'. Only valid while the group is
// in single-node representation (case 3 above) and not flagged.
void ExplodedNode::NodeGroup::replaceNode(ExplodedNode *node) {
  assert(!getFlag());

  // Reinterpret the raw pointer-and-flag word P as the PointerUnion it
  // actually encodes.
  GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
  assert(Storage.is<ExplodedNode *>());
  Storage = node;
  assert(Storage.is<ExplodedNode *>());
}
220 
// Append N to this group, upgrading the storage representation as needed
// (empty -> single inline node -> BumpVector of nodes).
void ExplodedNode::NodeGroup::addNode(ExplodedNode *N, ExplodedGraph &G) {
  assert(!getFlag());

  GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
  if (Storage.isNull()) {
    // Case 2 -> case 3: the group was empty; store the node inline.
    Storage = N;
    assert(Storage.is<ExplodedNode *>());
    return;
  }

  ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>();

  if (!V) {
    // Case 3 -> case 4: switch from single-node to multi-node representation.
    ExplodedNode *Old = Storage.get<ExplodedNode *>();

    // The vector is carved out of the graph's arena and constructed in
    // place, seeded with the previously inline node.
    BumpVectorContext &Ctx = G.getNodeAllocator();
    V = G.getAllocator().Allocate<ExplodedNodeVector>();
    new (V) ExplodedNodeVector(Ctx, 4);
    V->push_back(Old, Ctx);

    Storage = V;
    assert(!getFlag());
    assert(Storage.is<ExplodedNodeVector *>());
  }

  V->push_back(N, G.getNodeAllocator());
}
249 
250 unsigned ExplodedNode::NodeGroup::size() const {
251   if (getFlag())
252     return 0;
253 
254   const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
255   if (Storage.isNull())
256     return 0;
257   if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
258     return V->size();
259   return 1;
260 }
261 
262 ExplodedNode * const *ExplodedNode::NodeGroup::begin() const {
263   if (getFlag())
264     return nullptr;
265 
266   const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
267   if (Storage.isNull())
268     return nullptr;
269   if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
270     return V->begin();
271   return Storage.getAddrOfPtr1();
272 }
273 
274 ExplodedNode * const *ExplodedNode::NodeGroup::end() const {
275   if (getFlag())
276     return nullptr;
277 
278   const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
279   if (Storage.isNull())
280     return nullptr;
281   if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
282     return V->end();
283   return Storage.getAddrOfPtr1() + 1;
284 }
285 
// Compute a stable numeric identifier for this node from its position in
// the graph's bump allocator.
int64_t ExplodedNode::getID(ExplodedGraph *G) const {
  return G->getAllocator().identifyKnownAlignedObject<ExplodedNode>(this);
}
289 
290 bool ExplodedNode::isTrivial() const {
291   return pred_size() == 1 && succ_size() == 1 &&
292          getFirstPred()->getState()->getID() == getState()->getID() &&
293          getFirstPred()->succ_size() == 1;
294 }
295 
296 const CFGBlock *ExplodedNode::getCFGBlock() const {
297   ProgramPoint P = getLocation();
298   if (auto BEP = P.getAs<BlockEntrance>())
299     return BEP->getBlock();
300 
301   // Find the node's current statement in the CFG.
302   // FIXME: getStmtForDiagnostics() does nasty things in order to provide
303   // a valid statement for body farms, do we need this behavior here?
304   if (const Stmt *S = getStmtForDiagnostics())
305     return getLocationContext()
306         ->getAnalysisDeclContext()
307         ->getCFGStmtMap()
308         ->getBlock(S);
309 
310   return nullptr;
311 }
312 
313 static const LocationContext *
314 findTopAutosynthesizedParentContext(const LocationContext *LC) {
315   assert(LC->getAnalysisDeclContext()->isBodyAutosynthesized());
316   const LocationContext *ParentLC = LC->getParent();
317   assert(ParentLC && "We don't start analysis from autosynthesized code");
318   while (ParentLC->getAnalysisDeclContext()->isBodyAutosynthesized()) {
319     LC = ParentLC;
320     ParentLC = LC->getParent();
321     assert(ParentLC && "We don't start analysis from autosynthesized code");
322   }
323   return LC;
324 }
325 
// Return the statement this node's program point refers to, suitable for
// anchoring a path diagnostic, or null if the point carries no statement.
const Stmt *ExplodedNode::getStmtForDiagnostics() const {
  // We cannot place diagnostics on autosynthesized code.
  // Put them onto the call site through which we jumped into autosynthesized
  // code for the first time.
  const LocationContext *LC = getLocationContext();
  if (LC->getAnalysisDeclContext()->isBodyAutosynthesized()) {
    // It must be a stack frame because we only autosynthesize functions.
    return cast<StackFrameContext>(findTopAutosynthesizedParentContext(LC))
        ->getCallSite();
  }
  // Otherwise, see if the node's program point directly points to a statement.
  // Each kind of program point below exposes its statement differently.
  // FIXME: Refactor into a ProgramPoint method?
  ProgramPoint P = getLocation();
  if (auto SP = P.getAs<StmtPoint>())
    return SP->getStmt();
  if (auto BE = P.getAs<BlockEdge>())
    return BE->getSrc()->getTerminatorStmt();
  if (auto CE = P.getAs<CallEnter>())
    return CE->getCallExpr();
  if (auto CEE = P.getAs<CallExitEnd>())
    return CEE->getCalleeContext()->getCallSite();
  if (auto PIPP = P.getAs<PostInitializer>())
    return PIPP->getInitializer()->getInit();
  if (auto CEB = P.getAs<CallExitBegin>())
    return CEB->getReturnStmt();
  if (auto FEP = P.getAs<FunctionExitPoint>())
    return FEP->getStmt();

  return nullptr;
}
356 
// Walk forward along first-successor edges and return the first descendant
// statement usable for diagnostics, skipping control-flow "merge" points
// ('?:', '&&', '||') that do not correspond to a real statement location.
const Stmt *ExplodedNode::getNextStmtForDiagnostics() const {
  for (const ExplodedNode *N = getFirstSucc(); N; N = N->getFirstSucc()) {
    if (const Stmt *S = N->getStmtForDiagnostics()) {
      // Check if the statement is '?' or '&&'/'||'.  These are "merges",
      // not actual statement points.
      switch (S->getStmtClass()) {
        case Stmt::ChooseExprClass:
        case Stmt::BinaryConditionalOperatorClass:
        case Stmt::ConditionalOperatorClass:
          continue;  // keep walking to the next successor
        case Stmt::BinaryOperatorClass: {
          // Only the short-circuiting binary operators are merges.
          BinaryOperatorKind Op = cast<BinaryOperator>(S)->getOpcode();
          if (Op == BO_LAnd || Op == BO_LOr)
            continue;
          break;
        }
        default:
          break;
      }
      // We found the statement, so return it.
      return S;
    }
  }

  return nullptr;
}
383 
384 const Stmt *ExplodedNode::getPreviousStmtForDiagnostics() const {
385   for (const ExplodedNode *N = getFirstPred(); N; N = N->getFirstPred())
386     if (const Stmt *S = N->getStmtForDiagnostics())
387       return S;
388 
389   return nullptr;
390 }
391 
392 const Stmt *ExplodedNode::getCurrentOrPreviousStmtForDiagnostics() const {
393   if (const Stmt *S = getStmtForDiagnostics())
394     return S;
395 
396   return getPreviousStmtForDiagnostics();
397 }
398 
399 ExplodedNode *ExplodedGraph::getNode(const ProgramPoint &L,
400                                      ProgramStateRef State,
401                                      bool IsSink,
402                                      bool* IsNew) {
403   // Profile 'State' to determine if we already have an existing node.
404   llvm::FoldingSetNodeID profile;
405   void *InsertPos = nullptr;
406 
407   NodeTy::Profile(profile, L, State, IsSink);
408   NodeTy* V = Nodes.FindNodeOrInsertPos(profile, InsertPos);
409 
410   if (!V) {
411     if (!FreeNodes.empty()) {
412       V = FreeNodes.back();
413       FreeNodes.pop_back();
414     }
415     else {
416       // Allocate a new node.
417       V = (NodeTy*) getAllocator().Allocate<NodeTy>();
418     }
419 
420     new (V) NodeTy(L, State, IsSink);
421 
422     if (ReclaimNodeInterval)
423       ChangedNodes.push_back(V);
424 
425     // Insert the node into the node set and return it.
426     Nodes.InsertNode(V, InsertPos);
427     ++NumNodes;
428 
429     if (IsNew) *IsNew = true;
430   }
431   else
432     if (IsNew) *IsNew = false;
433 
434   return V;
435 }
436 
437 ExplodedNode *ExplodedGraph::createUncachedNode(const ProgramPoint &L,
438                                                 ProgramStateRef State,
439                                                 bool IsSink) {
440   NodeTy *V = (NodeTy *) getAllocator().Allocate<NodeTy>();
441   new (V) NodeTy(L, State, IsSink);
442   return V;
443 }
444 
// Build a trimmed copy of this graph containing only the nodes that can
// reach one of 'Sinks'. Optionally records old-to-new (ForwardMap) and
// new-to-old (InverseMap) node mappings. Returns null if the graph is empty
// or no root can reach a sink.
std::unique_ptr<ExplodedGraph>
ExplodedGraph::trim(ArrayRef<const NodeTy *> Sinks,
                    InterExplodedGraphMap *ForwardMap,
                    InterExplodedGraphMap *InverseMap) const {
  if (Nodes.empty())
    return nullptr;

  // Pass 1 marks every node that can reach a sink.
  using Pass1Ty = llvm::DenseSet<const ExplodedNode *>;
  Pass1Ty Pass1;

  // Pass 2 maps each reachable old node to its clone in the new graph.
  using Pass2Ty = InterExplodedGraphMap;
  InterExplodedGraphMap Pass2Scratch;
  Pass2Ty &Pass2 = ForwardMap ? *ForwardMap : Pass2Scratch;

  SmallVector<const ExplodedNode*, 10> WL1, WL2;

  // ===- Pass 1 (reverse DFS) -===
  for (const auto Sink : Sinks)
    if (Sink)
      WL1.push_back(Sink);

  // Process the first worklist until it is empty.
  while (!WL1.empty()) {
    const ExplodedNode *N = WL1.pop_back_val();

    // Have we already visited this node?  If so, continue to the next one.
    if (!Pass1.insert(N).second)
      continue;

    // If this is a root enqueue it to the second worklist.
    if (N->Preds.empty()) {
      WL2.push_back(N);
      continue;
    }

    // Visit our predecessors and enqueue them.
    WL1.append(N->Preds.begin(), N->Preds.end());
  }

  // We didn't hit a root? Return with a null pointer for the new graph.
  if (WL2.empty())
    return nullptr;

  // Create an empty graph.
  std::unique_ptr<ExplodedGraph> G = MakeEmptyGraph();

  // ===- Pass 2 (forward DFS to construct the new graph) -===
  while (!WL2.empty()) {
    const ExplodedNode *N = WL2.pop_back_val();

    // Skip this node if we have already processed it.
    if (Pass2.find(N) != Pass2.end())
      continue;

    // Create the corresponding node in the new graph and record the mapping
    // from the old node to the new node.
    ExplodedNode *NewN = G->createUncachedNode(N->getLocation(), N->State, N->isSink());
    Pass2[N] = NewN;

    // Also record the reverse mapping from the new node to the old node.
    if (InverseMap) (*InverseMap)[NewN] = N;

    // If this node is a root, designate it as such in the graph.
    if (N->Preds.empty())
      G->addRoot(NewN);

    // In the case that some of the intended predecessors of NewN have already
    // been created, we should hook them up as predecessors.

    // Walk through the predecessors of 'N' and hook up their corresponding
    // nodes in the new graph (if any) to the freshly created node.
    for (ExplodedNode::pred_iterator I = N->Preds.begin(), E = N->Preds.end();
         I != E; ++I) {
      Pass2Ty::iterator PI = Pass2.find(*I);
      if (PI == Pass2.end())
        continue;

      NewN->addPredecessor(const_cast<ExplodedNode *>(PI->second), *G);
    }

    // In the case that some of the intended successors of NewN have already
    // been created, we should hook them up as successors.  Otherwise, enqueue
    // the new nodes from the original graph that should have nodes created
    // in the new graph.
    for (ExplodedNode::succ_iterator I = N->Succs.begin(), E = N->Succs.end();
         I != E; ++I) {
      Pass2Ty::iterator PI = Pass2.find(*I);
      if (PI != Pass2.end()) {
        const_cast<ExplodedNode *>(PI->second)->addPredecessor(NewN, *G);
        continue;
      }

      // Enqueue nodes to the worklist that were marked during pass 1.
      if (Pass1.count(*I))
        WL2.push_back(*I);
    }
  }

  return G;
}
545