//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine --------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines a generic engine for intraprocedural, path-sensitive,
// dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
          "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
          "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
          "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

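/// Construct the worklist that implements the exploration strategy selected
/// in the analyzer options.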
static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
  switch (Opts.getExplorationStrategy()) {
  case ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  case ExplorationStrategyKind::UnexploredFirstQueue:
    return WorkList::makeUnexploredFirstPriorityQueue();
  case ExplorationStrategyKind::UnexploredFirstLocationQueue:
    return WorkList::makeUnexploredFirstPriorityLocationQueue();
  }
  llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
}

CoreEngine::CoreEngine(ExprEngine &exprengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : ExprEng(exprengine), WList(generateWorkList(Opts)),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
                            // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = ExprEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    ExprEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit.
  bool UnlimitedSteps = Steps == 0;
  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(Steps, PreReservationCap));

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit &WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  ExprEng.processEndWorklist();
  return WList->hasWork();
}

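/// Dispatch a dequeued worklist unit to the handler that corresponds to the
/// kind of its program point.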
void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
                                  const WorkListUnit &WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
      break;

    case ProgramPoint::BlockExitKind:
      assert(false && "BlockExit locations never occur in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind:
      HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
      break;

    case ProgramPoint::CallExitBeginKind:
      ExprEng.processCallExit(Pred);
      break;

    case ProgramPoint::EpsilonKind: {
      assert(Pred->hasSinglePred() &&
             "Assume epsilon has exactly one predecessor by construction");
      ExplodedNode *PNode = Pred->getFirstPred();
      dispatchWorkItem(Pred, PNode->getLocation(), WU);
      break;
    }
    default:
      assert(Loc.getAs<PostStmt>() ||
             Loc.getAs<PostInitializer>() ||
             Loc.getAs<PostImplicitCall>() ||
             Loc.getAs<CallExitEnd>() ||
             Loc.getAs<LoopExit>() ||
             Loc.getAs<PostAllocatorCall>());
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
      break;
  }
}

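/// Run the worklist algorithm from the given initial state and collect all
/// end-of-path nodes into \p Dst. Returns true if work was still pending when
/// the step limit was reached (i.e. the analysis did not finish).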
bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

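/// Process a transition along a CFG edge: mark the destination block as
/// visited, handle entry into the EXIT block specially, and otherwise hand
/// the block entrance over to the ExprEngine.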
void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Display a prunable path note to the user if it's a virtual bases branch
  // and we're taking the path that skips virtual base constructors.
  if (L.getSrc()->getTerminator().isVirtualBaseBranch() &&
      L.getDst() == *L.getSrc()->succ_begin()) {
    ProgramPoint P = L.withTag(getNoteTags().makeNoteTag(
        [](BugReporterContext &, PathSensitiveBugReport &) -> std::string {
          // TODO: Just call out the name of the most derived class
          // when we know it.
          return "Virtual base initialization skipped because "
                 "it has already been handled by the most derived class";
        }, /*IsPrunable=*/true));
    // Perform the transition.
    ExplodedNodeSet Dst;
    NodeBuilder Bldr(Pred, Dst, BuilderCtx);
    Pred = Bldr.generateNode(P, Pred->getState(), Pred);
    if (!Pred)
      return;
  }

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement, if any.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      CFGElement LastElement = L.getSrc()->back();
      if (Optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      } else if (Optional<CFGAutomaticObjDtor> AutoDtor =
                     LastElement.getAs<CFGAutomaticObjDtor>()) {
        RS = dyn_cast<ReturnStmt>(AutoDtor->getTriggerStmt());
      }
    }

    // Process the final state transition.
    ExprEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the ExprEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  ExprEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

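/// Process the entrance into a CFG block: bump the per-block visit counter
/// and start processing the block's first element, or go straight to the
/// block exit if the block is empty.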
void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
                                           BlockId);
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (Optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    ExprEng.processCFGElement(*E, Pred, 0, &Ctx);
  } else
    HandleBlockExit(L.getBlock(), Pred);
}

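/// Process the exit of a CFG block by dispatching on the block's terminator
/// (branches, switches, gotos, etc.).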
void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminatorStmt()) {
    switch (Term->getStmtClass()) {
      default:
        llvm_unreachable("Analysis for this terminator not implemented.");

      case Stmt::CXXBindTemporaryExprClass:
        HandleCleanupTemporaryBranch(
            cast<CXXBindTemporaryExpr>(Term), B, Pred);
        return;

      // Model static initializers.
      case Stmt::DeclStmtClass:
        HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
        return;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

      // FIXME: Use constant-folding in CFG construction to simplify this
      // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXTryStmtClass:
        // Generate a node for each of the successors.
        // Our logic for EH analysis can certainly be improved.
        for (CFGBlock::const_succ_iterator it = B->succ_begin(),
             et = B->succ_end(); it != et; ++it) {
          if (const CFGBlock *succ = *it) {
            generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                         Pred->State, Pred);
          }
        }
        return;

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXForRangeStmtClass:
        HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::SEHLeaveStmtClass:
      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert(B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        ExprEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass:
        // An ObjCForCollectionStmt appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), ExprEngine will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements. We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
        HandleBranch(Term, Term, B, Pred);
        return;

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);

        ExprEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::GCCAsmStmtClass:
        assert(cast<GCCAsmStmt>(Term)->isAsmGoto() &&
               "Encountered GCCAsmStmt without labels");
        // TODO: Handle jumping to labels
        return;
    }
  }

  if (B->getTerminator().isVirtualBaseBranch()) {
    HandleVirtualBaseBranch(B, Pred);
    return;
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

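/// Process a CallEnter program point by delegating to the ExprEngine, which
/// models entry into the called function.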
void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  ExprEng.processCallEnter(BuilderCtx, CE, Pred);
}

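/// Process a two-way branch terminator: let the ExprEngine evaluate the
/// condition and enqueue the resulting nodes for the 'true' and 'false'
/// successors.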
void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processBranch(Cond, Ctx, Pred, Dst, *(B->succ_begin()),
                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

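/// Process the two-way branch introduced for a CXXBindTemporaryExpr
/// terminator, which guards the conditional destruction of the bound
/// temporary.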
void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

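/// Process the two-way branch that models whether a static local variable's
/// initializer runs on this pass over its DeclStmt.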
void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                   *(B->succ_begin()), *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

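/// Process the CFG element at \p StmtIdx within block \p B, or the block exit
/// once all elements of the block have been processed.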
void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    ExprEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

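/// Decide which successor to take at a virtual-base branch: the path that
/// skips the virtual base initializers is taken when this constructor was
/// invoked as a base-class constructor, since the most derived class has
/// already initialized the virtual bases.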
void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
                                         ExplodedNode *Pred) {
  const LocationContext *LCtx = Pred->getLocationContext();
  if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
          LCtx->getStackFrame()->getCallSite())) {
    switch (CallerCtor->getConstructionKind()) {
    case CXXConstructExpr::CK_NonVirtualBase:
    case CXXConstructExpr::CK_VirtualBase: {
      BlockEdge Loc(B, *B->succ_begin(), LCtx);
      HandleBlockEdge(Loc, Pred);
      return;
    }
    default:
      break;
    }
  }

  // We either don't see a parent stack frame because we're in the top frame,
  // or the parent stack frame doesn't initialize our virtual bases.
  BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
  HandleBlockEdge(Loc, Pred);
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

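/// Enqueue a node produced while processing a CFG element, positioning the
/// worklist entry at the appropriate element index and creating an
/// intermediate PostStmt node when necessary.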
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx + 1);
}

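/// Create a CallExitBegin node in the callee's stack frame for the given
/// return statement. Returns the new node, or null if an identical node
/// already existed in the graph.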
ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node and enqueue it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

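/// Enqueue the nodes produced at the end of a function: for inlined calls,
/// transition to a CallExitBegin node; for the top-level frame, record the
/// node as an end-of-path node.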
void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set,
                                      const ReturnStmt *RS) {
  for (auto I : Set) {
    // If we are in an inlined call, generate CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should run the dead-bindings removal here.
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

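/// Create (or reuse) a node for the given program point and state, link it to
/// its predecessor, and update the builder's frontier. Returns null if the
/// node already existed; sink nodes are not added to the frontier.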
ExplodedNode *NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

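/// Generate a node on the requested branch unless that branch has been marked
/// infeasible.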
ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

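/// Generate a node for the default case of a switch. Returns null if the
/// default block is unreachable or if an identical node already existed.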
ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}