//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine --------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines a generic engine for intraprocedural, path-sensitive,
// dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SubEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
          "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
          "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
          "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts,
                                                  SubEngine &subengine) {
  switch (Opts.getExplorationStrategy()) {
  case ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  case ExplorationStrategyKind::UnexploredFirstQueue:
    return WorkList::makeUnexploredFirstPriorityQueue();
  case ExplorationStrategyKind::UnexploredFirstLocationQueue:
    return WorkList::makeUnexploredFirstPriorityLocationQueue();
  }
  llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
}

CoreEngine::CoreEngine(SubEngine &subengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : SubEng(subengine), WList(generateWorkList(Opts, subengine)),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
                            // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = SubEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    SubEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit.
  bool UnlimitedSteps = Steps == 0;
  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(Steps, PreReservationCap));

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit &WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  SubEng.processEndWorklist();
  return WList->hasWork();
}

void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
                                  const WorkListUnit &WU) {
  // Dispatch on the location type.
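  // BlockEdge, BlockEntrance, CallEnter and CallExitBegin points have
  // dedicated handlers; an epsilon point is re-dispatched at its
  // predecessor's location; the remaining statement-like points (PostStmt,
  // PostInitializer, CallExitEnd, ...) are handled by HandlePostStmt.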
  switch (Loc.getKind()) {
  case ProgramPoint::BlockEdgeKind:
    HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
    break;

  case ProgramPoint::BlockEntranceKind:
    HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
    break;

  case ProgramPoint::BlockExitKind:
    assert(false && "BlockExit location never occurs in forward analysis.");
    break;

  case ProgramPoint::CallEnterKind:
    HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
    break;

  case ProgramPoint::CallExitBeginKind:
    SubEng.processCallExit(Pred);
    break;

  case ProgramPoint::EpsilonKind: {
    assert(Pred->hasSinglePred() &&
           "Assume epsilon has exactly one predecessor by construction");
    ExplodedNode *PNode = Pred->getFirstPred();
    dispatchWorkItem(Pred, PNode->getLocation(), WU);
    break;
  }
  default:
    assert(Loc.getAs<PostStmt>() ||
           Loc.getAs<PostInitializer>() ||
           Loc.getAs<PostImplicitCall>() ||
           Loc.getAs<CallExitEnd>() ||
           Loc.getAs<LoopExit>() ||
           Loc.getAs<PostAllocatorCall>());
    HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
    break;
  }
}

bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement, if any.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      CFGElement LastElement = L.getSrc()->back();
      if (Optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      } else if (Optional<CFGAutomaticObjDtor> AutoDtor =
                     LastElement.getAs<CFGAutomaticObjDtor>()) {
        RS = dyn_cast<ReturnStmt>(AutoDtor->getTriggerStmt());
      }
    }

    // Process the final state transition.
    SubEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the SubEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  SubEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
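  // The counter records, per stack frame, how many times each basic block has
  // been visited along the current path; subengines consult this count when
  // entering a block to bound how often loops are unrolled.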
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
                                           BlockId);
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (Optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    SubEng.processCFGElement(*E, Pred, 0, &Ctx);
  } else
    HandleBlockExit(L.getBlock(), Pred);
}

void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminatorStmt()) {
    switch (Term->getStmtClass()) {
    default:
      llvm_unreachable("Analysis for this terminator not implemented.");

    case Stmt::CXXBindTemporaryExprClass:
      HandleCleanupTemporaryBranch(
          cast<CXXBindTemporaryExpr>(Term), B, Pred);
      return;

    // Model static initializers.
    case Stmt::DeclStmtClass:
      HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
      return;

    case Stmt::BinaryOperatorClass: // '&&' and '||'
      HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
      return;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass:
      HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                   Term, B, Pred);
      return;

    // FIXME: Use constant-folding in CFG construction to simplify this
    // case.

    case Stmt::ChooseExprClass:
      HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXTryStmtClass:
      // Generate a node for each of the successors.
      // Our logic for EH analysis can certainly be improved.
      for (CFGBlock::const_succ_iterator it = B->succ_begin(),
                                         et = B->succ_end(); it != et; ++it) {
        if (const CFGBlock *succ = *it) {
          generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                       Pred->State, Pred);
        }
      }
      return;

    case Stmt::DoStmtClass:
      HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXForRangeStmtClass:
      HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ForStmtClass:
      HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ContinueStmtClass:
    case Stmt::BreakStmtClass:
    case Stmt::GotoStmtClass:
      break;

    case Stmt::IfStmtClass:
      HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::IndirectGotoStmtClass: {
      // Only 1 successor: the indirect goto dispatch block.
      assert(B->succ_size() == 1);

      IndirectGotoNodeBuilder
          builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                  *(B->succ_begin()), this);

      SubEng.processIndirectGoto(builder);
      return;
    }

    case Stmt::ObjCForCollectionStmtClass:
      // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
      //
      //  (1) inside a basic block, which represents the binding of the
      //      'element' variable to a value.
      //  (2) in a terminator, which represents the branch.
      //
      // For (1), subengines will bind a value (i.e., 0 or 1) indicating
      // whether or not the collection contains any more elements. We cannot
      // just test to see if the element is nil because a container can
      // contain nil elements.
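      // For (2), the terminator itself acts as the branch condition, so it is
      // passed to HandleBranch as both the condition and the terminator.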
      HandleBranch(Term, Term, B, Pred);
      return;

    case Stmt::SwitchStmtClass: {
      SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                this);

      SubEng.processSwitch(builder);
      return;
    }

    case Stmt::WhileStmtClass:
      HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
      return;
    }
  }

  if (B->getTerminator().isVirtualBaseBranch()) {
    HandleVirtualBaseBranch(B, Pred);
    return;
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  SubEng.processCallEnter(BuilderCtx, CE, Pred);
}

void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processBranch(Cond, Ctx, Pred, Dst, *(B->succ_begin()),
                       *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                       *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                  *(B->succ_begin()), *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
                                         ExplodedNode *Pred) {
  const LocationContext *LCtx = Pred->getLocationContext();
  if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
          LCtx->getStackFrame()->getCallSite())) {
    switch (CallerCtor->getConstructionKind()) {
    case CXXConstructExpr::CK_NonVirtualBase:
    case CXXConstructExpr::CK_VirtualBase: {
      BlockEdge Loc(B, *B->succ_begin(), LCtx);
      HandleBlockEdge(Loc, Pred);
      return;
    }
    default:
      break;
    }
  }

  // We either don't see a parent stack frame because we're in the top frame,
  // or the parent stack frame doesn't initialize our virtual bases.
  BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
  HandleBlockEdge(Loc, Pred);
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew)
    WList->enqueue(Node);
}

void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx + 1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node and enqueue it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set,
                                      const ReturnStmt *RS) {
  for (auto I : Set) {
    // If we are in an inlined call, generate a CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should run remove dead bindings here.
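      // This node ends a path in the top frame: record it as an end-of-path
      // node and count the completed path.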
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

ExplodedNode *NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}