//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SubEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
            "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
            "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
            "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts,
                                                  SubEngine &subengine) {
  switch (Opts.getExplorationStrategy()) {
    case AnalyzerOptions::ExplorationStrategyKind::DFS:
      return WorkList::makeDFS();
    case AnalyzerOptions::ExplorationStrategyKind::BFS:
      return WorkList::makeBFS();
    case AnalyzerOptions::ExplorationStrategyKind::BFSBlockDFSContents:
      return WorkList::makeBFSBlockDFSContents();
    case AnalyzerOptions::ExplorationStrategyKind::UnexploredFirst:
      return WorkList::makeUnexploredFirst();
    case AnalyzerOptions::ExplorationStrategyKind::UnexploredFirstQueue:
      return WorkList::makeUnexploredFirstPriorityQueue();
    case AnalyzerOptions::ExplorationStrategyKind::UnexploredFirstLocationQueue:
      return WorkList::makeUnexploredFirstPriorityLocationQueue();
    default:
      llvm_unreachable("Unexpected case");
  }
}
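
// Illustrative note (not part of the original file): a minimal sketch of how
// the strategy consumed by generateWorkList() is typically selected from the
// command line, assuming the analyzer-config key is spelled
// "exploration_strategy" and accepts values such as "dfs", "bfs", or
// "unexplored_first_queue" (the key and value spellings are assumptions, not
// taken from this file):
//
//   clang --analyze \
//     -Xclang -analyzer-config -Xclang exploration_strategy=unexplored_first_queue \
//     test.c
//
// AnalyzerOptions::getExplorationStrategy() maps that setting onto the
// ExplorationStrategyKind switched on above.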

CoreEngine::CoreEngine(SubEngine &subengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : SubEng(subengine), WList(generateWorkList(Opts, subengine)),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                   ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
    // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to the empty counter.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = SubEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    SubEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit
  bool UnlimitedSteps = Steps == 0;
  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(Steps, PreReservationCap));

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit& WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  SubEng.processEndWorklist();
  return WList->hasWork();
}
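
// Illustrative note (not part of the original file): a minimal sketch of how a
// SubEngine typically drives the worklist loop above; every name other than
// the ones defined in this file is an assumption for illustration:
//
//   CoreEngine Engine(MySubEngine, &MyFunctionSummaries, Opts);
//   const LocationContext *Top = /* stack frame of the analyzed Decl */;
//   bool Aborted = Engine.ExecuteWorkList(Top, /*Steps=*/150000,
//                                         /*InitState=*/nullptr);
//
// Passing Steps == 0 means "no limit". A 'true' return value means the
// worklist still had work when the step budget ran out, i.e. the analysis of
// this function was cut short.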

void CoreEngine::dispatchWorkItem(ExplodedNode* Pred, ProgramPoint Loc,
                                  const WorkListUnit& WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
      break;

    case ProgramPoint::BlockExitKind:
      assert(false && "BlockExit locations never occur in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind:
      HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
      break;

    case ProgramPoint::CallExitBeginKind:
      SubEng.processCallExit(Pred);
      break;

    case ProgramPoint::EpsilonKind: {
      assert(Pred->hasSinglePred() &&
             "Assume epsilon has exactly one predecessor by construction");
      ExplodedNode *PNode = Pred->getFirstPred();
      dispatchWorkItem(Pred, PNode->getLocation(), WU);
      break;
    }
    default:
      assert(Loc.getAs<PostStmt>() ||
             Loc.getAs<PostInitializer>() ||
             Loc.getAs<PostImplicitCall>() ||
             Loc.getAs<CallExitEnd>() ||
             Loc.getAs<LoopExit>() ||
             Loc.getAs<PostAllocatorCall>());
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
      break;
  }
}

bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      CFGElement LastElement = L.getSrc()->back();
      if (Optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      } else if (Optional<CFGAutomaticObjDtor> AutoDtor =
                 LastElement.getAs<CFGAutomaticObjDtor>()) {
        RS = dyn_cast<ReturnStmt>(AutoDtor->getTriggerStmt());
      }
    }

    // Process the final state transition.
    SubEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the SubEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  SubEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}
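
// Illustrative note (not part of the original file): when the edge handled
// above leads into the EXIT block, the last CFG element of the source block is
// inspected to recover the return statement. For example, in
//
//   int f() { int x = 1; return x; }
//
// the block preceding EXIT ends with the CFGStmt for 'return x;', so RS is
// non-null and is passed to processEndOfFunction(); for a function that simply
// falls off the end, RS stays null.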

void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                       ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
                                           BlockId);
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (Optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    SubEng.processCFGElement(*E, Pred, 0, &Ctx);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
      default:
        llvm_unreachable("Analysis for this terminator not implemented.");

      case Stmt::CXXBindTemporaryExprClass:
        HandleCleanupTemporaryBranch(
            cast<CXXBindTemporaryExpr>(B->getTerminator().getStmt()), B, Pred);
        return;

      // Model static initializers.
      case Stmt::DeclStmtClass:
        HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
        return;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

        // FIXME: Use constant-folding in CFG construction to simplify this
        // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXTryStmtClass:
        // Generate a node for each of the successors.
        // Our logic for EH analysis can certainly be improved.
        for (CFGBlock::const_succ_iterator it = B->succ_begin(),
             et = B->succ_end(); it != et; ++it) {
          if (const CFGBlock *succ = *it) {
            generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                         Pred->State, Pred);
          }
        }
        return;

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXForRangeStmtClass:
        HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert(B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        SubEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass:
        // An ObjCForCollectionStmt appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), subengines will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements.  We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
        HandleBranch(Term, Term, B, Pred);
        return;

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                    this);

        SubEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;
    }
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}
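
// Illustrative note (not part of the original file): a worked example of the
// terminator dispatch above. For a source-level branch such as
//
//   if (x > 0) f(); else g();
//
// the block holding the condition has an IfStmt terminator with exactly two
// successors (then/else), so HandleBlockExit() takes the IfStmtClass case and
// calls HandleBranch() with the condition 'x > 0'. The SubEngine then decides
// which of the two successor edges are feasible, and each feasible edge is
// enqueued as a new BlockEdge node.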

void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  SubEng.processCallEnter(BuilderCtx, CE, Pred);
}

void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                                const CFGBlock * B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processBranch(Cond, Ctx, Pred, Dst, *(B->succ_begin()),
                       *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                       *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                  *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
///  and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor.  Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node; the caller is responsible for enqueueing it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set, const ReturnStmt *RS) {
  for (auto I : Set) {
    // If we are in an inlined call, generate a CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should remove dead bindings here.
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

ExplodedNode* NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}
657