//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine --------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via a graph reachability engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <optional>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
            "The # of steps executed.");
STATISTIC(NumSTUSteps, "The # of STU steps executed.");
STATISTIC(NumCTUSteps, "The # of CTU steps executed.");
STATISTIC(NumReachedMaxSteps,
            "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
            "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
  switch (Opts.getExplorationStrategy()) {
  case ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  case ExplorationStrategyKind::UnexploredFirstQueue:
    return WorkList::makeUnexploredFirstPriorityQueue();
  case ExplorationStrategyKind::UnexploredFirstLocationQueue:
    return WorkList::makeUnexploredFirstPriorityLocationQueue();
  }
  llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
}

CoreEngine::CoreEngine(ExprEngine &exprengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : ExprEng(exprengine), WList(generateWorkList(Opts)),
      CTUWList(Opts.IsNaiveCTUEnabled ? generateWorkList(Opts) : nullptr),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

void CoreEngine::setBlockCounter(BlockCounter C) {
  WList->setBlockCounter(C);
  if (CTUWList)
    CTUWList->setBlockCounter(C);
}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned MaxSteps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
    // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to being empty.
    setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = ExprEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    ExprEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit
  bool UnlimitedSteps = MaxSteps == 0;

  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(MaxSteps, PreReservationCap));

  auto ProcessWList = [this, UnlimitedSteps](unsigned MaxSteps) {
    unsigned Steps = MaxSteps;
    while (WList->hasWork()) {
      if (!UnlimitedSteps) {
        if (Steps == 0) {
          NumReachedMaxSteps++;
          break;
        }
        --Steps;
      }

      NumSteps++;

      const WorkListUnit &WU = WList->dequeue();

      // Set the current block counter.
      setBlockCounter(WU.getBlockCounter());

      // Retrieve the node.
      ExplodedNode *Node = WU.getNode();

      dispatchWorkItem(Node, Node->getLocation(), WU);
    }
    return MaxSteps - Steps;
  };
  const unsigned STUSteps = ProcessWList(MaxSteps);

  if (CTUWList) {
    NumSTUSteps += STUSteps;
    const unsigned MinCTUSteps =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesMin;
    const unsigned Pct =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesPercentage;
    unsigned MaxCTUSteps = std::max(STUSteps * Pct / 100, MinCTUSteps);

    WList = std::move(CTUWList);
    const unsigned CTUSteps = ProcessWList(MaxCTUSteps);
    NumCTUSteps += CTUSteps;
  }

  ExprEng.processEndWorklist();
  return WList->hasWork();
}

void CoreEngine::dispatchWorkItem(ExplodedNode* Pred, ProgramPoint Loc,
                                  const WorkListUnit& WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
  case ProgramPoint::BlockEdgeKind:
    HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
    break;

  case ProgramPoint::BlockEntranceKind:
    HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
    break;

  case ProgramPoint::BlockExitKind:
    assert(false && "BlockExit locations never occur in forward analysis.");
    break;

  case ProgramPoint::CallEnterKind:
    HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
    break;

  case ProgramPoint::CallExitBeginKind:
    ExprEng.processCallExit(Pred);
    break;

  case ProgramPoint::EpsilonKind: {
    assert(Pred->hasSinglePred() &&
           "Assume epsilon has exactly one predecessor by construction");
    ExplodedNode *PNode = Pred->getFirstPred();
    dispatchWorkItem(Pred, PNode->getLocation(), WU);
    break;
  }
  default:
    assert(Loc.getAs<PostStmt>() ||
           Loc.getAs<PostInitializer>() ||
           Loc.getAs<PostImplicitCall>() ||
           Loc.getAs<CallExitEnd>() ||
           Loc.getAs<LoopExit>() ||
           Loc.getAs<PostAllocatorCall>());
    HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
    break;
  }
}

bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Display a prunable path note to the user if it's a virtual bases branch
  // and we're taking the path that skips virtual base constructors.
  if (L.getSrc()->getTerminator().isVirtualBaseBranch() &&
      L.getDst() == *L.getSrc()->succ_begin()) {
    ProgramPoint P = L.withTag(getDataTags().make<NoteTag>(
        [](BugReporterContext &, PathSensitiveBugReport &) -> std::string {
          // TODO: Just call out the name of the most derived class
          // when we know it.
          return "Virtual base initialization skipped because "
                 "it has already been handled by the most derived class";
        },
        /*IsPrunable=*/true));
    // Perform the transition.
    ExplodedNodeSet Dst;
    NodeBuilder Bldr(Pred, Dst, BuilderCtx);
    Pred = Bldr.generateNode(P, Pred->getState(), Pred);
    if (!Pred)
      return;
  }

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      CFGElement LastElement = L.getSrc()->back();
      if (std::optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      } else if (std::optional<CFGAutomaticObjDtor> AutoDtor =
                     LastElement.getAs<CFGAutomaticObjDtor>()) {
        RS = dyn_cast<ReturnStmt>(AutoDtor->getTriggerStmt());
      }
    }

    // Process the final state transition.
    ExprEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the ExprEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  ExprEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
                                           BlockId);
  setBlockCounter(Counter);

  // Process the entrance of the block.
  if (std::optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    ExprEng.processCFGElement(*E, Pred, 0, &Ctx);
  } else
    HandleBlockExit(L.getBlock(), Pred);
}

void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminatorStmt()) {
    switch (Term->getStmtClass()) {
    default:
      llvm_unreachable("Analysis for this terminator not implemented.");

    case Stmt::CXXBindTemporaryExprClass:
      HandleCleanupTemporaryBranch(
          cast<CXXBindTemporaryExpr>(Term), B, Pred);
      return;

    // Model static initializers.
    case Stmt::DeclStmtClass:
      HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
      return;

    case Stmt::BinaryOperatorClass: // '&&' and '||'
      HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
      return;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass:
      HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                   Term, B, Pred);
      return;

    // FIXME: Use constant-folding in CFG construction to simplify this
    // case.

    case Stmt::ChooseExprClass:
      HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXTryStmtClass:
      // Generate a node for each of the successors.
      // Our logic for EH analysis can certainly be improved.
      for (CFGBlock::const_succ_iterator it = B->succ_begin(),
           et = B->succ_end(); it != et; ++it) {
        if (const CFGBlock *succ = *it) {
          generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                       Pred->State, Pred);
        }
      }
      return;

    case Stmt::DoStmtClass:
      HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXForRangeStmtClass:
      HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ForStmtClass:
      HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::SEHLeaveStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::BreakStmtClass:
    case Stmt::GotoStmtClass:
      break;

    case Stmt::IfStmtClass:
      HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::IndirectGotoStmtClass: {
      // Only 1 successor: the indirect goto dispatch block.
      assert(B->succ_size() == 1);

      IndirectGotoNodeBuilder
         builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                 *(B->succ_begin()), this);

      ExprEng.processIndirectGoto(builder);
      return;
    }

    case Stmt::ObjCForCollectionStmtClass:
      // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
      //
      //  (1) inside a basic block, which represents the binding of the
      //      'element' variable to a value.
      //  (2) in a terminator, which represents the branch.
      //
      // For (1), ExprEngine will bind a value (i.e., 0 or 1) indicating
      // whether or not the collection contains any more elements. We cannot
      // just test to see if the element is nil because a container can
      // contain nil elements.
      HandleBranch(Term, Term, B, Pred);
      return;

    case Stmt::SwitchStmtClass: {
      SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                this);

      ExprEng.processSwitch(builder);
      return;
    }

    case Stmt::WhileStmtClass:
      HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::GCCAsmStmtClass:
      assert(cast<GCCAsmStmt>(Term)->isAsmGoto() &&
             "Encountered GCCAsmStmt without labels");
      // TODO: Handle jumping to labels
      return;
    }
  }

  if (B->getTerminator().isVirtualBaseBranch()) {
    HandleVirtualBaseBranch(B, Pred);
    return;
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  ExprEng.processCallEnter(BuilderCtx, CE, Pred);
}

void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processBranch(Cond, Ctx, Pred, Dst, *(B->succ_begin()),
                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                   *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    ExprEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
                                         ExplodedNode *Pred) {
  const LocationContext *LCtx = Pred->getLocationContext();
  if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
          LCtx->getStackFrame()->getCallSite())) {
    switch (CallerCtor->getConstructionKind()) {
    case CXXConstructExpr::CK_NonVirtualBase:
    case CXXConstructExpr::CK_VirtualBase: {
      BlockEdge Loc(B, *B->succ_begin(), LCtx);
      HandleBlockEdge(Loc, Pred);
      return;
    }
    default:
      break;
    }
  }

  // We either don't see a parent stack frame because we're in the top frame,
  // or the parent stack frame doesn't initialize our virtual bases.
  BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
  HandleBlockEdge(Loc, Pred);
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node and enqueue it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set,
                                      const ReturnStmt *RS) {
  for (auto *I : Set) {
    // If we are in an inlined call, generate CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should run remove dead bindings here.
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

ExplodedNode* NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Basic correctness check for default blocks that are unreachable and not
  // caught by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}