//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine --------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines a generic engine for intraprocedural, path-sensitive,
// dataflow analysis via a graph reachability engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <optional>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
            "The # of steps executed.");
STATISTIC(NumSTUSteps, "The # of STU steps executed.");
STATISTIC(NumCTUSteps, "The # of CTU steps executed.");
STATISTIC(NumReachedMaxSteps,
            "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
            "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
  switch (Opts.getExplorationStrategy()) {
  case ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  case ExplorationStrategyKind::UnexploredFirstQueue:
    return WorkList::makeUnexploredFirstPriorityQueue();
  case ExplorationStrategyKind::UnexploredFirstLocationQueue:
    return WorkList::makeUnexploredFirstPriorityLocationQueue();
  }
  llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
}
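
// Note: the worklist flavor chosen above is controlled by AnalyzerOptions;
// at the time of writing this corresponds to the 'exploration_strategy'
// analyzer-config option (see AnalyzerOptions.def for the accepted values
// and the default strategy).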

CoreEngine::CoreEngine(ExprEngine &exprengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : ExprEng(exprengine), WList(generateWorkList(Opts)),
      CTUWList(Opts.IsNaiveCTUEnabled ? generateWorkList(Opts) : nullptr),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

void CoreEngine::setBlockCounter(BlockCounter C) {
  WList->setBlockCounter(C);
  if (CTUWList)
    CTUWList->setBlockCounter(C);
}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned MaxSteps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
    // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = ExprEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    ExprEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit.
  bool UnlimitedSteps = MaxSteps == 0;

  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(MaxSteps, PreReservationCap));

  auto ProcessWList = [this, UnlimitedSteps](unsigned MaxSteps) {
    unsigned Steps = MaxSteps;
    while (WList->hasWork()) {
      if (!UnlimitedSteps) {
        if (Steps == 0) {
          NumReachedMaxSteps++;
          break;
        }
        --Steps;
      }

      NumSteps++;

      const WorkListUnit &WU = WList->dequeue();

      // Set the current block counter.
      setBlockCounter(WU.getBlockCounter());

      // Retrieve the node.
      ExplodedNode *Node = WU.getNode();

      dispatchWorkItem(Node, Node->getLocation(), WU);
    }
    return MaxSteps - Steps;
  };
  const unsigned STUSteps = ProcessWList(MaxSteps);

  if (CTUWList) {
    NumSTUSteps += STUSteps;
    const unsigned MinCTUSteps =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesMin;
    const unsigned Pct =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesPercentage;
    unsigned MaxCTUSteps = std::max(STUSteps * Pct / 100, MinCTUSteps);

    WList = std::move(CTUWList);
    const unsigned CTUSteps = ProcessWList(MaxCTUSteps);
    NumCTUSteps += CTUSteps;
  }

  ExprEng.processEndWorklist();
  return WList->hasWork();
}
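
// Illustrative arithmetic for the cross-TU budget computed above (example
// figures, not the configured defaults): with STUSteps = 100000,
// CTUMaxNodesPercentage = 50 and CTUMaxNodesMin = 10000, the CTU phase gets
// max(100000 * 50 / 100, 10000) = 50000 steps.

/// Dispatch a dequeued node to the handler matching its program point kind.
/// Statement-like points (PostStmt, PostInitializer, PostImplicitCall,
/// CallExitEnd, LoopExit, PostAllocatorCall) all funnel into HandlePostStmt;
/// an epsilon point is re-dispatched using its predecessor's location.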
void CoreEngine::dispatchWorkItem(ExplodedNode* Pred, ProgramPoint Loc,
                                  const WorkListUnit& WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
      break;

    case ProgramPoint::BlockExitKind:
      assert(false && "BlockExit location never occurs in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind:
      HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
      break;

    case ProgramPoint::CallExitBeginKind:
      ExprEng.processCallExit(Pred);
      break;

    case ProgramPoint::EpsilonKind: {
      assert(Pred->hasSinglePred() &&
             "Assume epsilon has exactly one predecessor by construction");
      ExplodedNode *PNode = Pred->getFirstPred();
      dispatchWorkItem(Pred, PNode->getLocation(), WU);
      break;
    }
    default:
      assert(Loc.getAs<PostStmt>() ||
             Loc.getAs<PostInitializer>() ||
             Loc.getAs<PostImplicitCall>() ||
             Loc.getAs<CallExitEnd>() ||
             Loc.getAs<LoopExit>() ||
             Loc.getAs<PostAllocatorCall>());
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
      break;
  }
}

bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}
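
/// Process a transition over a CFG block edge: mark the destination block as
/// visited, attach a prunable note when the edge skips virtual base
/// initializers, finish the path if the destination is the EXIT block, and
/// otherwise hand the block entrance over to the ExprEngine.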
void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Display a prunable path note to the user if it's a virtual bases branch
  // and we're taking the path that skips virtual base constructors.
  if (L.getSrc()->getTerminator().isVirtualBaseBranch() &&
      L.getDst() == *L.getSrc()->succ_begin()) {
    ProgramPoint P = L.withTag(getDataTags().make<NoteTag>(
        [](BugReporterContext &, PathSensitiveBugReport &) -> std::string {
          // TODO: Just call out the name of the most derived class
          // when we know it.
          return "Virtual base initialization skipped because "
                 "it has already been handled by the most derived class";
        },
        /*IsPrunable=*/true));
    // Perform the transition.
    ExplodedNodeSet Dst;
    NodeBuilder Bldr(Pred, Dst, BuilderCtx);
    Pred = Bldr.generateNode(P, Pred->getState(), Pred);
    if (!Pred)
      return;
  }

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      CFGElement LastElement = L.getSrc()->back();
      if (Optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      } else if (Optional<CFGAutomaticObjDtor> AutoDtor =
                     LastElement.getAs<CFGAutomaticObjDtor>()) {
        RS = dyn_cast<ReturnStmt>(AutoDtor->getTriggerStmt());
      }
    }

    // Process the final state transition.
    ExprEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the ExprEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  ExprEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
                                           BlockId);
  setBlockCounter(Counter);

  // Process the entrance of the block.
  if (Optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    ExprEng.processCFGElement(*E, Pred, 0, &Ctx);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminatorStmt()) {
    switch (Term->getStmtClass()) {
      default:
        llvm_unreachable("Analysis for this terminator not implemented.");

      case Stmt::CXXBindTemporaryExprClass:
        HandleCleanupTemporaryBranch(
            cast<CXXBindTemporaryExpr>(Term), B, Pred);
        return;

      // Model static initializers.
      case Stmt::DeclStmtClass:
        HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
        return;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

      // FIXME: Use constant-folding in CFG construction to simplify this
      // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXTryStmtClass:
        // Generate a node for each of the successors.
        // Our logic for EH analysis can certainly be improved.
        for (CFGBlock::const_succ_iterator it = B->succ_begin(),
             et = B->succ_end(); it != et; ++it) {
          if (const CFGBlock *succ = *it) {
            generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                         Pred->State, Pred);
          }
        }
        return;

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXForRangeStmtClass:
        HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::SEHLeaveStmtClass:
      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert(B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        ExprEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass:
        // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), ExprEngine will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements. We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
        HandleBranch(Term, Term, B, Pred);
        return;

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);

        ExprEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::GCCAsmStmtClass:
        assert(cast<GCCAsmStmt>(Term)->isAsmGoto() &&
               "Encountered GCCAsmStmt without labels");
        // TODO: Handle jumping to labels
        return;
    }
  }

  if (B->getTerminator().isVirtualBaseBranch()) {
    HandleVirtualBaseBranch(B, Pred);
    return;
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  ExprEng.processCallEnter(BuilderCtx, CE, Pred);
}

void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock * B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processBranch(Cond, Ctx, Pred, Dst, *(B->succ_begin()),
                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                   *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}
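
/// Process the CFG element at StmtIdx within block B, or hand the block off
/// to HandleBlockExit once every element in it has been visited.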
void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    ExprEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
                                         ExplodedNode *Pred) {
  const LocationContext *LCtx = Pred->getLocationContext();
  if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
          LCtx->getStackFrame()->getCallSite())) {
    switch (CallerCtor->getConstructionKind()) {
      case CXXConstructExpr::CK_NonVirtualBase:
      case CXXConstructExpr::CK_VirtualBase: {
        BlockEdge Loc(B, *B->succ_begin(), LCtx);
        HandleBlockEdge(Loc, Pred);
        return;
      }
      default:
        break;
    }
  }

  // We either don't see a parent stack frame because we're in the top frame,
  // or the parent stack frame doesn't initialize our virtual bases.
  BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
  HandleBlockEdge(Loc, Pred);
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}
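
/// Create a CallExitBegin node in the callee's stack frame context and link
/// it to N. Returns the new node, or nullptr if an equivalent node already
/// exists in the graph and the path merely merges into it.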
ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node and enqueue it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set,
                                      const ReturnStmt *RS) {
  for (auto *I : Set) {
    // If we are in an inlined call, generate CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should run remove dead bindings here.
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

ExplodedNode* NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}
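
/// Generate a node for the default branch of a switch. In the CFG the default
/// destination is laid out as the last successor of the switch block; it may
/// be null if that edge was found to be unreachable.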
ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Basic correctness check for default blocks that are unreachable and not
  // caught by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}