xref: /llvm-project/clang/lib/StaticAnalyzer/Core/CoreEngine.cpp (revision ef0aab3138ac8a57c370623c32854f797713dda2)
1 //===- CoreEngine.cpp - Path-Sensitive Dataflow Engine --------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 //  This file defines a generic engine for intraprocedural, path-sensitive,
10 //  dataflow analysis via a graph reachability engine.
11 //
12 //===----------------------------------------------------------------------===//
13 
14 #include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
15 #include "clang/AST/Expr.h"
16 #include "clang/AST/ExprCXX.h"
17 #include "clang/AST/Stmt.h"
18 #include "clang/AST/StmtCXX.h"
19 #include "clang/Analysis/AnalysisDeclContext.h"
20 #include "clang/Analysis/CFG.h"
21 #include "clang/Analysis/ProgramPoint.h"
22 #include "clang/Basic/LLVM.h"
23 #include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
24 #include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
25 #include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
26 #include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
27 #include "clang/StaticAnalyzer/Core/PathSensitive/SubEngine.h"
28 #include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
29 #include "llvm/ADT/Optional.h"
30 #include "llvm/ADT/STLExtras.h"
31 #include "llvm/ADT/Statistic.h"
32 #include "llvm/Support/Casting.h"
33 #include "llvm/Support/ErrorHandling.h"
34 #include <algorithm>
35 #include <cassert>
36 #include <memory>
37 #include <utility>
38 
39 using namespace clang;
40 using namespace ento;
41 
42 #define DEBUG_TYPE "CoreEngine"
43 
44 STATISTIC(NumSteps,
45             "The # of steps executed.");
46 STATISTIC(NumReachedMaxSteps,
47             "The # of times we reached the max number of steps.");
48 STATISTIC(NumPathsExplored,
49             "The # of paths explored by the analyzer.");
50 
51 //===----------------------------------------------------------------------===//
52 // Core analysis engine.
53 //===----------------------------------------------------------------------===//
54 
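/// generateWorkList - Create the worklist implementation that matches the
/// exploration strategy requested in the analyzer options.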
55 static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts,
56                                                   SubEngine &subengine) {
57   switch (Opts.getExplorationStrategy()) {
58     case ExplorationStrategyKind::DFS:
59       return WorkList::makeDFS();
60     case ExplorationStrategyKind::BFS:
61       return WorkList::makeBFS();
62     case ExplorationStrategyKind::BFSBlockDFSContents:
63       return WorkList::makeBFSBlockDFSContents();
64     case ExplorationStrategyKind::UnexploredFirst:
65       return WorkList::makeUnexploredFirst();
66     case ExplorationStrategyKind::UnexploredFirstQueue:
67       return WorkList::makeUnexploredFirstPriorityQueue();
68     case ExplorationStrategyKind::UnexploredFirstLocationQueue:
69       return WorkList::makeUnexploredFirstPriorityLocationQueue();
70   }
71   llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
72 }
73 
74 CoreEngine::CoreEngine(SubEngine &subengine, FunctionSummariesTy *FS,
75                        AnalyzerOptions &Opts)
76     : SubEng(subengine), WList(generateWorkList(Opts, subengine)),
77       BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}
78 
79 /// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
80 bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
81                                    ProgramStateRef InitState) {
82   // Initialize the analysis by constructing the root if none exists.
83   if (G.num_roots() == 0) {
84 
85     const CFGBlock *Entry = &(L->getCFG()->getEntry());
86 
87     assert(Entry->empty() && "Entry block must be empty.");
88 
89     assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");
90 
91     // Mark the entry block as visited.
92     FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
93                                              L->getDecl(),
94                                              L->getCFG()->getNumBlockIDs());
95 
96     // Get the solitary successor.
97     const CFGBlock *Succ = *(Entry->succ_begin());
98 
99     // Construct an edge representing the
100     // starting location in the function.
101     BlockEdge StartLoc(Entry, Succ, L);
102 
103     // Set the current block counter to be empty.
104     WList->setBlockCounter(BCounterFactory.GetEmptyCounter());
105 
106     if (!InitState)
107       InitState = SubEng.getInitialState(L);
108 
109     bool IsNew;
110     ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
111     assert(IsNew);
112     G.addRoot(Node);
113 
114     NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
115     ExplodedNodeSet DstBegin;
116     SubEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);
117 
118     enqueue(DstBegin);
119   }
120 
121   // Check if we have a step limit.
122   bool UnlimitedSteps = Steps == 0;
123   // Cap our pre-reservation in the event that the user specifies
124   // a very large number of maximum steps.
125   const unsigned PreReservationCap = 4000000;
126   if (!UnlimitedSteps)
127     G.reserve(std::min(Steps, PreReservationCap));
128 
129   while (WList->hasWork()) {
130     if (!UnlimitedSteps) {
131       if (Steps == 0) {
132         NumReachedMaxSteps++;
133         break;
134       }
135       --Steps;
136     }
137 
138     NumSteps++;
139 
140     const WorkListUnit& WU = WList->dequeue();
141 
142     // Set the current block counter.
143     WList->setBlockCounter(WU.getBlockCounter());
144 
145     // Retrieve the node.
146     ExplodedNode *Node = WU.getNode();
147 
148     dispatchWorkItem(Node, Node->getLocation(), WU);
149   }
150   SubEng.processEndWorklist();
151   return WList->hasWork();
152 }
153 
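/// dispatchWorkItem - Route a node to the handler that corresponds to the
/// kind of its program point (block edge, block entrance, call enter/exit,
/// post-statement, etc.).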
154 void CoreEngine::dispatchWorkItem(ExplodedNode* Pred, ProgramPoint Loc,
155                                   const WorkListUnit& WU) {
156   // Dispatch on the location type.
157   switch (Loc.getKind()) {
158     case ProgramPoint::BlockEdgeKind:
159       HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
160       break;
161 
162     case ProgramPoint::BlockEntranceKind:
163       HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
164       break;
165 
166     case ProgramPoint::BlockExitKind:
167       assert(false && "BlockExit locations never occur in forward analysis.");
168       break;
169 
170     case ProgramPoint::CallEnterKind:
171       HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
172       break;
173 
174     case ProgramPoint::CallExitBeginKind:
175       SubEng.processCallExit(Pred);
176       break;
177 
178     case ProgramPoint::EpsilonKind: {
179       assert(Pred->hasSinglePred() &&
180              "Assume epsilon has exactly one predecessor by construction");
181       ExplodedNode *PNode = Pred->getFirstPred();
182       dispatchWorkItem(Pred, PNode->getLocation(), WU);
183       break;
184     }
185     default:
186       assert(Loc.getAs<PostStmt>() ||
187              Loc.getAs<PostInitializer>() ||
188              Loc.getAs<PostImplicitCall>() ||
189              Loc.getAs<CallExitEnd>() ||
190              Loc.getAs<LoopExit>() ||
191              Loc.getAs<PostAllocatorCall>());
192       HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
193       break;
194   }
195 }
196 
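/// ExecuteWorkListWithInitialState - Run the worklist algorithm and collect
/// the end-of-path nodes of the resulting graph into Dst. Returns true if
/// the analysis was stopped by the step limit before the worklist was
/// exhausted.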
197 bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
198                                                  unsigned Steps,
199                                                  ProgramStateRef InitState,
200                                                  ExplodedNodeSet &Dst) {
201   bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
202   for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
203        ++I) {
204     Dst.Add(*I);
205   }
206   return DidNotFinish;
207 }
208 
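/// HandleBlockEdge - Process a transition over a CFG block edge: mark the
/// destination block as visited, handle the virtual-base and EXIT-block
/// special cases, and otherwise let the subengine process the block
/// entrance.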
209 void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
210   const CFGBlock *Blk = L.getDst();
211   NodeBuilderContext BuilderCtx(*this, Blk, Pred);
212 
213   // Mark this block as visited.
214   const LocationContext *LC = Pred->getLocationContext();
215   FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
216                                            LC->getDecl(),
217                                            LC->getCFG()->getNumBlockIDs());
218 
219   // Display a prunable path note to the user if it's a virtual bases branch
220   // and we're taking the path that skips virtual base constructors.
221   if (L.getSrc()->getTerminator().isVirtualBaseBranch() &&
222       L.getDst() == *L.getSrc()->succ_begin()) {
223     ProgramPoint P = L.withTag(getNoteTags().makeNoteTag(
224         [](BugReporterContext &, BugReport &) -> std::string {
225           // TODO: Just call out the name of the most derived class
226           // when we know it.
227           return "Virtual base initialization skipped because "
228                  "it has already been handled by the most derived class";
229         }, /*IsPrunable=*/true));
230     // Perform the transition.
231     ExplodedNodeSet Dst;
232     NodeBuilder Bldr(Pred, Dst, BuilderCtx);
233     Pred = Bldr.generateNode(P, Pred->getState(), Pred);
234     if (!Pred)
235       return;
236   }
237 
238   // Check if we are entering the EXIT block.
239   if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
240     assert(L.getLocationContext()->getCFG()->getExit().empty() &&
241            "EXIT block cannot contain Stmts.");
242 
243     // Get the return statement.
244     const ReturnStmt *RS = nullptr;
245     if (!L.getSrc()->empty()) {
246       CFGElement LastElement = L.getSrc()->back();
247       if (Optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
248         RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
249       } else if (Optional<CFGAutomaticObjDtor> AutoDtor =
250                  LastElement.getAs<CFGAutomaticObjDtor>()) {
251         RS = dyn_cast<ReturnStmt>(AutoDtor->getTriggerStmt());
252       }
253     }
254 
255     // Process the final state transition.
256     SubEng.processEndOfFunction(BuilderCtx, Pred, RS);
257 
258     // This path is done. Don't enqueue any more nodes.
259     return;
260   }
261 
262   // Call into the SubEngine to process entering the CFGBlock.
263   ExplodedNodeSet dstNodes;
264   BlockEntrance BE(Blk, Pred->getLocationContext());
265   NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
266   SubEng.processCFGBlockEntrance(L, nodeBuilder, Pred);
267 
268   // Auto-generate a node.
269   if (!nodeBuilder.hasGeneratedNodes()) {
270     nodeBuilder.generateNode(Pred->State, Pred);
271   }
272 
273   // Enqueue nodes onto the worklist.
274   enqueue(dstNodes);
275 }
276 
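/// HandleBlockEntrance - Bump the visit count of the block for the current
/// stack frame, then process its first CFG element, or go straight to the
/// block-exit logic if the block is empty.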
277 void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
278                                        ExplodedNode *Pred) {
279   // Increment the block counter.
280   const LocationContext *LC = Pred->getLocationContext();
281   unsigned BlockId = L.getBlock()->getBlockID();
282   BlockCounter Counter = WList->getBlockCounter();
283   Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
284                                            BlockId);
285   WList->setBlockCounter(Counter);
286 
287   // Process the entrance of the block.
288   if (Optional<CFGElement> E = L.getFirstElement()) {
289     NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
290     SubEng.processCFGElement(*E, Pred, 0, &Ctx);
291   }
292   else
293     HandleBlockExit(L.getBlock(), Pred);
294 }
295 
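/// HandleBlockExit - Process the terminator of a fully evaluated block by
/// dispatching to the matching branch/switch/goto handler, or follow the
/// single successor edge if the block has no terminator.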
296 void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
297   if (const Stmt *Term = B->getTerminatorStmt()) {
298     switch (Term->getStmtClass()) {
299       default:
300         llvm_unreachable("Analysis for this terminator not implemented.");
301 
302       case Stmt::CXXBindTemporaryExprClass:
303         HandleCleanupTemporaryBranch(
304             cast<CXXBindTemporaryExpr>(Term), B, Pred);
305         return;
306 
307       // Model static initializers.
308       case Stmt::DeclStmtClass:
309         HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
310         return;
311 
312       case Stmt::BinaryOperatorClass: // '&&' and '||'
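        // For short-circuiting logical operators the CFG branches on the
        // left-hand side: e.g. for 'a && b', the value of 'a' decides whether
        // the block evaluating 'b' is entered.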
313         HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
314         return;
315 
316       case Stmt::BinaryConditionalOperatorClass:
317       case Stmt::ConditionalOperatorClass:
318         HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
319                      Term, B, Pred);
320         return;
321 
322         // FIXME: Use constant-folding in CFG construction to simplify this
323         // case.
324 
325       case Stmt::ChooseExprClass:
326         HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
327         return;
328 
329       case Stmt::CXXTryStmtClass:
330         // Generate a node for each of the successors.
331         // Our logic for EH analysis can certainly be improved.
332         for (CFGBlock::const_succ_iterator it = B->succ_begin(),
333              et = B->succ_end(); it != et; ++it) {
334           if (const CFGBlock *succ = *it) {
335             generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
336                          Pred->State, Pred);
337           }
338         }
339         return;
340 
341       case Stmt::DoStmtClass:
342         HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
343         return;
344 
345       case Stmt::CXXForRangeStmtClass:
346         HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
347         return;
348 
349       case Stmt::ForStmtClass:
350         HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
351         return;
352 
353       case Stmt::ContinueStmtClass:
354       case Stmt::BreakStmtClass:
355       case Stmt::GotoStmtClass:
356         break;
357 
358       case Stmt::IfStmtClass:
359         HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
360         return;
361 
362       case Stmt::IndirectGotoStmtClass: {
363         // Only 1 successor: the indirect goto dispatch block.
364         assert(B->succ_size() == 1);
365 
366         IndirectGotoNodeBuilder
367            builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
368                    *(B->succ_begin()), this);
369 
370         SubEng.processIndirectGoto(builder);
371         return;
372       }
373 
374       case Stmt::ObjCForCollectionStmtClass:
375         // An ObjCForCollectionStmt appears twice in a CFG:
376         //
377         //  (1) inside a basic block, which represents the binding of the
378         //      'element' variable to a value.
379         //  (2) in a terminator, which represents the branch.
380         //
381         // For (1), subengines will bind a value (i.e., 0 or 1) indicating
382         // whether or not the collection contains any more elements.  We cannot
383         // just test to see if the element is nil because a container can
384         // contain nil elements.
385         HandleBranch(Term, Term, B, Pred);
386         return;
387 
388       case Stmt::SwitchStmtClass: {
389         SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
390                                     this);
391 
392         SubEng.processSwitch(builder);
393         return;
394       }
395 
396       case Stmt::WhileStmtClass:
397         HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
398         return;
399     }
400   }
401 
402   if (B->getTerminator().isVirtualBaseBranch()) {
403     HandleVirtualBaseBranch(B, Pred);
404     return;
405   }
406 
407   assert(B->succ_size() == 1 &&
408          "Blocks with no terminator should have exactly 1 successor.");
409 
410   generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
411                Pred->State, Pred);
412 }
413 
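/// HandleCallEnter - Hand the transition into an inlined callee over to the
/// subengine.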
414 void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
415   NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
416   SubEng.processCallEnter(BuilderCtx, CE, Pred);
417 }
418 
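/// HandleBranch - Ask the subengine to evaluate a two-way branch condition;
/// by CFG convention the first successor is the 'true' branch and the second
/// is the 'false' branch.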
419 void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
420                               const CFGBlock *B, ExplodedNode *Pred) {
421   assert(B->succ_size() == 2);
422   NodeBuilderContext Ctx(*this, B, Pred);
423   ExplodedNodeSet Dst;
424   SubEng.processBranch(Cond, Ctx, Pred, Dst, *(B->succ_begin()),
425                        *(B->succ_begin() + 1));
426   // Enqueue the new frontier onto the worklist.
427   enqueue(Dst);
428 }
429 
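/// HandleCleanupTemporaryBranch - Evaluate the implicit branch that decides
/// whether the destructor of a conditionally constructed temporary has to run
/// on the current path.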
430 void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
431                                               const CFGBlock *B,
432                                               ExplodedNode *Pred) {
433   assert(B->succ_size() == 2);
434   NodeBuilderContext Ctx(*this, B, Pred);
435   ExplodedNodeSet Dst;
436   SubEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
437                                        *(B->succ_begin() + 1));
438   // Enqueue the new frontier onto the worklist.
439   enqueue(Dst);
440 }
441 
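/// HandleStaticInit - Evaluate the branch that models whether the dynamic
/// initialization of a static local variable still has to be performed; the
/// two successors correspond to running and skipping the initializer.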
442 void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
443                                   ExplodedNode *Pred) {
444   assert(B->succ_size() == 2);
445   NodeBuilderContext Ctx(*this, B, Pred);
446   ExplodedNodeSet Dst;
447   SubEng.processStaticInitializer(DS, Ctx, Pred, Dst,
448                                   *(B->succ_begin()), *(B->succ_begin()+1));
449   // Enqueue the new frontier onto the worklist.
450   enqueue(Dst);
451 }
452 
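/// HandlePostStmt - Advance to the CFG element at index StmtIdx, or hand the
/// node over to the block-exit logic once every element of the block has been
/// processed.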
453 void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
454                                 ExplodedNode *Pred) {
455   assert(B);
456   assert(!B->empty());
457 
458   if (StmtIdx == B->size())
459     HandleBlockExit(B, Pred);
460   else {
461     NodeBuilderContext Ctx(*this, B, Pred);
462     SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
463   }
464 }
465 
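/// HandleVirtualBaseBranch - Pick the successor of a virtual-base branch:
/// if this constructor is itself invoked as a base-subobject constructor, the
/// edge that skips virtual base initialization is taken (the most derived
/// class is responsible for it); otherwise the initializing edge is taken.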
466 void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
467                                          ExplodedNode *Pred) {
468   const LocationContext *LCtx = Pred->getLocationContext();
469   if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
470           LCtx->getStackFrame()->getCallSite())) {
471     switch (CallerCtor->getConstructionKind()) {
472     case CXXConstructExpr::CK_NonVirtualBase:
473     case CXXConstructExpr::CK_VirtualBase: {
474       BlockEdge Loc(B, *B->succ_begin(), LCtx);
475       HandleBlockEdge(Loc, Pred);
476       return;
477     }
478     default:
479       break;
480     }
481   }
482 
483   // We either don't see a parent stack frame because we're in the top frame,
484   // or the parent stack frame doesn't initialize our virtual bases.
485   BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
486   HandleBlockEdge(Loc, Pred);
487 }
488 
489 /// generateNode - Utility method to generate nodes, hook up successors,
490 ///  and add nodes to the worklist.
491 void CoreEngine::generateNode(const ProgramPoint &Loc,
492                               ProgramStateRef State,
493                               ExplodedNode *Pred) {
494   bool IsNew;
495   ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);
496 
497   if (Pred)
498     Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
499   else {
500     assert(IsNew);
501     G.addRoot(Node); // 'Node' has no predecessor.  Make it a root.
502   }
503 
504   // Only add 'Node' to the worklist if it was freshly generated.
505   if (IsNew) WList->enqueue(Node);
506 }
507 
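/// enqueueStmtNode - Put a node produced in the middle of a block back on the
/// worklist, advancing the CFG element index and, for ordinary statements,
/// creating a successor node at the corresponding PostStmt program point if
/// one does not exist yet.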
508 void CoreEngine::enqueueStmtNode(ExplodedNode *N,
509                                  const CFGBlock *Block, unsigned Idx) {
510   assert(Block);
511   assert(!N->isSink());
512 
513   // Check if this node entered a callee.
514   if (N->getLocation().getAs<CallEnter>()) {
515     // Still use the index of the CallExpr. It's needed to create the callee
516     // StackFrameContext.
517     WList->enqueue(N, Block, Idx);
518     return;
519   }
520 
521   // Do not create extra nodes. Move to the next CFG element.
522   if (N->getLocation().getAs<PostInitializer>() ||
523       N->getLocation().getAs<PostImplicitCall>() ||
524       N->getLocation().getAs<LoopExit>()) {
525     WList->enqueue(N, Block, Idx+1);
526     return;
527   }
528 
529   if (N->getLocation().getAs<EpsilonPoint>()) {
530     WList->enqueue(N, Block, Idx);
531     return;
532   }
533 
534   if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
535     WList->enqueue(N, Block, Idx+1);
536     return;
537   }
538 
539   // At this point, we know we're processing a normal statement.
540   CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
541   PostStmt Loc(CS.getStmt(), N->getLocationContext());
542 
543   if (Loc == N->getLocation().withTag(nullptr)) {
544     // Note: 'N' should be a fresh node; otherwise it would not be
545     // a member of Deferred.
546     WList->enqueue(N, Block, Idx+1);
547     return;
548   }
549 
550   bool IsNew;
551   ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
552   Succ->addPredecessor(N, G);
553 
554   if (IsNew)
555     WList->enqueue(Succ, Block, Idx+1);
556 }
557 
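/// generateCallExitBeginNode - Create a CallExitBegin node for the callee's
/// stack frame and link it to N. Returns null if an equivalent node already
/// exists in the graph.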
558 ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
559                                                     const ReturnStmt *RS) {
560   // Create a CallExitBegin node and enqueue it.
561   const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());
562 
563   // Use the callee location context.
564   CallExitBegin Loc(LocCtx, RS);
565 
566   bool IsNew;
567   ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &IsNew);
568   Node->addPredecessor(N, G);
569   return IsNew ? Node : nullptr;
570 }
571 
572 void CoreEngine::enqueue(ExplodedNodeSet &Set) {
573   for (const auto I : Set)
574     WList->enqueue(I);
575 }
576 
577 void CoreEngine::enqueue(ExplodedNodeSet &Set,
578                          const CFGBlock *Block, unsigned Idx) {
579   for (const auto I : Set)
580     enqueueStmtNode(I, Block, Idx);
581 }
582 
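/// enqueueEndOfFunction - For nodes that finish an inlined call, enqueue the
/// corresponding CallExitBegin node; for nodes in the top frame, record them
/// as end-of-path nodes.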
583 void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set, const ReturnStmt *RS) {
584   for (auto I : Set) {
585     // If we are in an inlined call, generate a CallExitBegin node.
586     if (I->getLocationContext()->getParent()) {
587       I = generateCallExitBeginNode(I, RS);
588       if (I)
589         WList->enqueue(I);
590     } else {
591       // TODO: We should run the remove-dead-bindings step here.
592       G.addEndOfPath(I);
593       NumPathsExplored++;
594     }
595   }
596 }
597 
598 void NodeBuilder::anchor() {}
599 
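/// generateNodeImpl - Create (or find) a node for the given location and
/// state, link it to FromN, and update the builder's frontier. Returns null
/// if the node already existed; sink nodes are not added to the frontier.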
600 ExplodedNode* NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
601                                             ProgramStateRef State,
602                                             ExplodedNode *FromN,
603                                             bool MarkAsSink) {
604   HasGeneratedNodes = true;
605   bool IsNew;
606   ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
607   N->addPredecessor(FromN, C.Eng.G);
608   Frontier.erase(FromN);
609 
610   if (!IsNew)
611     return nullptr;
612 
613   if (!MarkAsSink)
614     Frontier.Add(N);
615 
616   return N;
617 }
618 
619 void NodeBuilderWithSinks::anchor() {}
620 
621 StmtNodeBuilder::~StmtNodeBuilder() {
622   if (EnclosingBldr)
623     for (const auto I : Frontier)
624       EnclosingBldr->addNodes(I);
625 }
626 
627 void BranchNodeBuilder::anchor() {}
628 
629 ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
630                                               bool branch,
631                                               ExplodedNode *NodePred) {
632   // If the branch has been marked infeasible, we should not generate a node.
633   if (!isFeasible(branch))
634     return nullptr;
635 
636   ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
637                                NodePred->getLocationContext());
638   ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
639   return Succ;
640 }
641 
642 ExplodedNode*
643 IndirectGotoNodeBuilder::generateNode(const iterator &I,
644                                       ProgramStateRef St,
645                                       bool IsSink) {
646   bool IsNew;
647   ExplodedNode *Succ =
648       Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
649                     St, IsSink, &IsNew);
650   Succ->addPredecessor(Pred, Eng.G);
651 
652   if (!IsNew)
653     return nullptr;
654 
655   if (!IsSink)
656     Eng.WList->enqueue(Succ);
657 
658   return Succ;
659 }
660 
661 ExplodedNode*
662 SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
663                                         ProgramStateRef St) {
664   bool IsNew;
665   ExplodedNode *Succ =
666       Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
667                     St, false, &IsNew);
668   Succ->addPredecessor(Pred, Eng.G);
669   if (!IsNew)
670     return nullptr;
671 
672   Eng.WList->enqueue(Succ);
673   return Succ;
674 }
675 
676 ExplodedNode*
677 SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
678                                            bool IsSink) {
679   // Get the block for the default case.
680   assert(Src->succ_rbegin() != Src->succ_rend());
681   CFGBlock *DefaultBlock = *Src->succ_rbegin();
682 
683   // Sanity check for default blocks that are unreachable and not caught
684   // by earlier stages.
685   if (!DefaultBlock)
686     return nullptr;
687 
688   bool IsNew;
689   ExplodedNode *Succ =
690       Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
691                     St, IsSink, &IsNew);
692   Succ->addPredecessor(Pred, Eng.G);
693 
694   if (!IsNew)
695     return nullptr;
696 
697   if (!IsSink)
698     Eng.WList->enqueue(Succ);
699 
700   return Succ;
701 }
702