//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines a generic engine for intraprocedural, path-sensitive,
// dataflow analysis via a graph reachability engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <optional>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
          "The # of steps executed.");
STATISTIC(NumSTUSteps, "The # of STU steps executed.");
STATISTIC(NumCTUSteps, "The # of CTU steps executed.");
STATISTIC(NumReachedMaxSteps,
          "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
          "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
  switch (Opts.getExplorationStrategy()) {
  case ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  case ExplorationStrategyKind::UnexploredFirstQueue:
    return WorkList::makeUnexploredFirstPriorityQueue();
  case ExplorationStrategyKind::UnexploredFirstLocationQueue:
    return WorkList::makeUnexploredFirstPriorityLocationQueue();
  }
  llvm_unreachable("Unknown AnalyzerOptions::ExplorationStrategyKind");
}

CoreEngine::CoreEngine(ExprEngine &exprengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : ExprEng(exprengine), WList(generateWorkList(Opts)),
      CTUWList(Opts.IsNaiveCTUEnabled ? generateWorkList(Opts) : nullptr),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

void CoreEngine::setBlockCounter(BlockCounter C) {
  WList->setBlockCounter(C);
  if (CTUWList)
    CTUWList->setBlockCounter(C);
}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned MaxSteps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
                            // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = ExprEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    ExprEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit.
  bool UnlimitedSteps = MaxSteps == 0;

  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(MaxSteps, PreReservationCap));

  auto ProcessWList = [this, UnlimitedSteps](unsigned MaxSteps) {
    unsigned Steps = MaxSteps;
    while (WList->hasWork()) {
      if (!UnlimitedSteps) {
        if (Steps == 0) {
          NumReachedMaxSteps++;
          break;
        }
        --Steps;
      }

      NumSteps++;

      const WorkListUnit &WU = WList->dequeue();

      // Set the current block counter.
      setBlockCounter(WU.getBlockCounter());

      // Retrieve the node.
      ExplodedNode *Node = WU.getNode();

      dispatchWorkItem(Node, Node->getLocation(), WU);
    }
    return MaxSteps - Steps;
  };
  const unsigned STUSteps = ProcessWList(MaxSteps);

  if (CTUWList) {
    NumSTUSteps += STUSteps;
    const unsigned MinCTUSteps =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesMin;
    const unsigned Pct =
        this->ExprEng.getAnalysisManager().options.CTUMaxNodesPercentage;
    unsigned MaxCTUSteps = std::max(STUSteps * Pct / 100, MinCTUSteps);

    WList = std::move(CTUWList);
    const unsigned CTUSteps = ProcessWList(MaxCTUSteps);
    NumCTUSteps += CTUSteps;
  }

  ExprEng.processEndWorklist();
  return WList->hasWork();
}

void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
                                  const WorkListUnit &WU) {
  // Dispatch on the location type.
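  // BlockEdge and BlockEntrance items drive transitions between CFG blocks,
  // CallEnter/CallExitBegin handle entering and leaving inlined calls, and
  // everything that falls through to the default case is a statement-level
  // program point that resumes at its (block, index) position in the CFG.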
  switch (Loc.getKind()) {
  case ProgramPoint::BlockEdgeKind:
    HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
    break;

  case ProgramPoint::BlockEntranceKind:
    HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
    break;

  case ProgramPoint::BlockExitKind:
    assert(false && "BlockExit location never occurs in forward analysis.");
    break;

  case ProgramPoint::CallEnterKind:
    HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
    break;

  case ProgramPoint::CallExitBeginKind:
    ExprEng.processCallExit(Pred);
    break;

  case ProgramPoint::EpsilonKind: {
    assert(Pred->hasSinglePred() &&
           "Assume epsilon has exactly one predecessor by construction");
    ExplodedNode *PNode = Pred->getFirstPred();
    dispatchWorkItem(Pred, PNode->getLocation(), WU);
    break;
  }
  default:
    assert(Loc.getAs<PostStmt>() ||
           Loc.getAs<PostInitializer>() ||
           Loc.getAs<PostImplicitCall>() ||
           Loc.getAs<CallExitEnd>() ||
           Loc.getAs<LoopExit>() ||
           Loc.getAs<PostAllocatorCall>());
    HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
    break;
  }
}

void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Display a prunable path note to the user if it's a virtual bases branch
  // and we're taking the path that skips virtual base constructors.
  if (L.getSrc()->getTerminator().isVirtualBaseBranch() &&
      L.getDst() == *L.getSrc()->succ_begin()) {
    ProgramPoint P = L.withTag(getDataTags().make<NoteTag>(
        [](BugReporterContext &, PathSensitiveBugReport &) -> std::string {
          // TODO: Just call out the name of the most derived class
          // when we know it.
          return "Virtual base initialization skipped because "
                 "it has already been handled by the most derived class";
        },
        /*IsPrunable=*/true));
    // Perform the transition.
    ExplodedNodeSet Dst;
    NodeBuilder Bldr(Pred, Dst, BuilderCtx);
    Pred = Bldr.generateNode(P, Pred->getState(), Pred);
    if (!Pred)
      return;
  }

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      CFGElement LastElement = L.getSrc()->back();
      if (std::optional<CFGStmt> LastStmt = LastElement.getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      } else if (std::optional<CFGAutomaticObjDtor> AutoDtor =
                     LastElement.getAs<CFGAutomaticObjDtor>()) {
        RS = dyn_cast<ReturnStmt>(AutoDtor->getTriggerStmt());
      }
    }

    // Process the final state transition.
    ExprEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the ExprEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  ExprEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
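  // If processCFGBlockEntrance() generated no nodes (neither the engine nor
  // any checker was interested in this block entrance), propagate the
  // predecessor's state unchanged so the path is not dropped here.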
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getStackFrame(),
                                           BlockId);
  setBlockCounter(Counter);

  // Process the entrance of the block.
  if (std::optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    ExprEng.processCFGElement(*E, Pred, 0, &Ctx);
  } else
    HandleBlockExit(L.getBlock(), Pred);
}

void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminatorStmt()) {
    switch (Term->getStmtClass()) {
    default:
      llvm_unreachable("Analysis for this terminator not implemented.");

    case Stmt::CXXBindTemporaryExprClass:
      HandleCleanupTemporaryBranch(cast<CXXBindTemporaryExpr>(Term), B, Pred);
      return;

    // Model static initializers.
    case Stmt::DeclStmtClass:
      HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
      return;

    case Stmt::BinaryOperatorClass: // '&&' and '||'
      HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
      return;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass:
      HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                   Term, B, Pred);
      return;

    // FIXME: Use constant-folding in CFG construction to simplify this
    // case.

    case Stmt::ChooseExprClass:
      HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXTryStmtClass:
      // Generate a node for each of the successors.
      // Our logic for EH analysis can certainly be improved.
      for (CFGBlock::const_succ_iterator it = B->succ_begin(),
                                         et = B->succ_end();
           it != et; ++it) {
        if (const CFGBlock *succ = *it) {
          generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                       Pred->State, Pred);
        }
      }
      return;

    case Stmt::DoStmtClass:
      HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXForRangeStmtClass:
      HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ForStmtClass:
      HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::SEHLeaveStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::BreakStmtClass:
    case Stmt::GotoStmtClass:
      break;

    case Stmt::IfStmtClass:
      HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::IndirectGotoStmtClass: {
      // Only 1 successor: the indirect goto dispatch block.
      assert(B->succ_size() == 1);

      IndirectGotoNodeBuilder
          builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                  *(B->succ_begin()), this);

      ExprEng.processIndirectGoto(builder);
      return;
    }

    case Stmt::ObjCForCollectionStmtClass:
      // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
      //
      //  (1) inside a basic block, which represents the binding of the
      //      'element' variable to a value.
      //  (2) in a terminator, which represents the branch.
      //
      // For (1), ExprEngine will bind a value (i.e., 0 or 1) indicating
      // whether or not the collection contains any more elements. We cannot
      // just test to see if the element is nil because a container can
      // contain nil elements.
      HandleBranch(Term, Term, B, Pred);
      return;

    case Stmt::SwitchStmtClass: {
      SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                this);

      ExprEng.processSwitch(builder);
      return;
    }

    case Stmt::WhileStmtClass:
      HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::GCCAsmStmtClass:
      assert(cast<GCCAsmStmt>(Term)->isAsmGoto() &&
             "Encountered GCCAsmStmt without labels");
      // TODO: Handle jumping to labels.
      return;
    }
  }

  if (B->getTerminator().isVirtualBaseBranch()) {
    HandleVirtualBaseBranch(B, Pred);
    return;
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  ExprEng.processCallEnter(BuilderCtx, CE, Pred);
}

void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processBranch(Cond, Ctx, Pred, Dst, *(B->succ_begin()),
                        *(B->succ_begin() + 1),
                        getCompletedIterationCount(B, Pred));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                        *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  ExprEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                   *(B->succ_begin()), *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    ExprEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

void CoreEngine::HandleVirtualBaseBranch(const CFGBlock *B,
                                         ExplodedNode *Pred) {
  const LocationContext *LCtx = Pred->getLocationContext();
  if (const auto *CallerCtor = dyn_cast_or_null<CXXConstructExpr>(
          LCtx->getStackFrame()->getCallSite())) {
    switch (CallerCtor->getConstructionKind()) {
    case CXXConstructionKind::NonVirtualBase:
    case CXXConstructionKind::VirtualBase: {
      BlockEdge Loc(B, *B->succ_begin(), LCtx);
      HandleBlockEdge(Loc, Pred);
      return;
    }
    default:
      break;
    }
  }

  // We either don't see a parent stack frame because we're in the top frame,
  // or the parent stack frame doesn't initialize our virtual bases.
  BlockEdge Loc(B, *(B->succ_begin() + 1), LCtx);
  HandleBlockEdge(Loc, Pred);
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx + 1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node and enqueue it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
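  // CallExitBegin is still attributed to the callee's stack frame; the
  // transition back into the caller's context happens later, during call-exit
  // processing (see ExprEngine::processCallExit).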
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

std::optional<unsigned>
CoreEngine::getCompletedIterationCount(const CFGBlock *B,
                                       ExplodedNode *Pred) const {
  const LocationContext *LC = Pred->getLocationContext();
  BlockCounter Counter = WList->getBlockCounter();
  unsigned BlockCount =
      Counter.getNumVisited(LC->getStackFrame(), B->getBlockID());

  const Stmt *Term = B->getTerminatorStmt();
  if (isa<ForStmt, WhileStmt, CXXForRangeStmt>(Term)) {
    assert(BlockCount >= 1 &&
           "Block count of currently analyzed block must be >= 1");
    return BlockCount - 1;
  }
  if (isa<DoStmt>(Term)) {
    // In a do-while loop one iteration happens before the first evaluation of
    // the loop condition, so we don't subtract one.
    return BlockCount;
  }
  // ObjCForCollectionStmt is skipped intentionally because the current
  // application of the iteration counts is not relevant for it.
  return std::nullopt;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set,
                                      const ReturnStmt *RS) {
  for (auto *I : Set) {
    // If we are in an inlined call, generate CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should run remove dead bindings here.
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

ExplodedNode *NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.getEngine().G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.getEngine().G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool Branch,
                                              ExplodedNode *NodePred) {
  const CFGBlock *Dst = Branch ? DstT : DstF;

  if (!Dst)
    return nullptr;

  ProgramPoint Loc =
      BlockEdge(C.getBlock(), Dst, NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode *
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode *
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

ExplodedNode *
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Basic correctness check for default blocks that are unreachable and not
  // caught by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}