1 //===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // This file defines the C++ expression evaluation engine. 10 // 11 //===----------------------------------------------------------------------===// 12 13 #include "clang/AST/DeclCXX.h" 14 #include "clang/AST/ParentMap.h" 15 #include "clang/AST/StmtCXX.h" 16 #include "clang/Analysis/ConstructionContext.h" 17 #include "clang/Basic/PrettyStackTrace.h" 18 #include "clang/StaticAnalyzer/Core/CheckerManager.h" 19 #include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h" 20 #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h" 21 #include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h" 22 #include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h" 23 #include <optional> 24 25 using namespace clang; 26 using namespace ento; 27 28 void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME, 29 ExplodedNode *Pred, 30 ExplodedNodeSet &Dst) { 31 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 32 const Expr *tempExpr = ME->getSubExpr()->IgnoreParens(); 33 ProgramStateRef state = Pred->getState(); 34 const LocationContext *LCtx = Pred->getLocationContext(); 35 36 state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME); 37 Bldr.generateNode(ME, Pred, state); 38 } 39 40 // FIXME: This is the sort of code that should eventually live in a Core 41 // checker rather than as a special case in ExprEngine. 42 void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred, 43 const CallEvent &Call) { 44 SVal ThisVal; 45 bool AlwaysReturnsLValue; 46 const CXXRecordDecl *ThisRD = nullptr; 47 if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) { 48 assert(Ctor->getDecl()->isTrivial()); 49 assert(Ctor->getDecl()->isCopyOrMoveConstructor()); 50 ThisVal = Ctor->getCXXThisVal(); 51 ThisRD = Ctor->getDecl()->getParent(); 52 AlwaysReturnsLValue = false; 53 } else { 54 assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial()); 55 assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() == 56 OO_Equal); 57 ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal(); 58 ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent(); 59 AlwaysReturnsLValue = true; 60 } 61 62 assert(ThisRD); 63 if (ThisRD->isEmpty()) { 64 // Do nothing for empty classes. Otherwise it'd retrieve an UnknownVal 65 // and bind it and RegionStore would think that the actual value 66 // in this region at this offset is unknown. 67 return; 68 } 69 70 const LocationContext *LCtx = Pred->getLocationContext(); 71 72 ExplodedNodeSet Dst; 73 Bldr.takeNodes(Pred); 74 75 SVal V = Call.getArgSVal(0); 76 77 // If the value being copied is not unknown, load from its location to get 78 // an aggregate rvalue. 
  if (std::optional<Loc> L = V.getAs<Loc>())
    V = Pred->getState()->getSVal(*L);
  else
    assert(V.isUnknownOrUndef());

  const Expr *CallExpr = Call.getOriginExpr();
  evalBind(Dst, CallExpr, Pred, ThisVal, V, true);

  PostStmt PS(CallExpr, LCtx);
  for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end();
       I != E; ++I) {
    ProgramStateRef State = (*I)->getState();
    if (AlwaysReturnsLValue)
      State = State->BindExpr(CallExpr, LCtx, ThisVal);
    else
      State = bindReturnValue(Call, LCtx, State);
    Bldr.generateNode(PS, State, *I);
  }
}

SVal ExprEngine::makeElementRegion(ProgramStateRef State, SVal LValue,
                                   QualType &Ty, bool &IsArray, unsigned Idx) {
  SValBuilder &SVB = State->getStateManager().getSValBuilder();
  ASTContext &Ctx = SVB.getContext();

  if (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
    while (AT) {
      Ty = AT->getElementType();
      AT = dyn_cast<ArrayType>(AT->getElementType());
    }
    LValue = State->getLValue(Ty, SVB.makeArrayIndex(Idx), LValue);
    IsArray = true;
  }

  return LValue;
}

// When the prvalue is returned from the function (the kind is one of
// SimpleReturnedValueKind, CXX17ElidedCopyReturnedValueKind), its
// materialization happens in the context of the caller.
// We pass BldrCtx explicitly, as currBldrCtx always refers to the callee's
// context.
SVal ExprEngine::computeObjectUnderConstruction(
    const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx,
    const LocationContext *LCtx, const ConstructionContext *CC,
    EvalCallOptions &CallOpts, unsigned Idx) {

  SValBuilder &SVB = getSValBuilder();
  MemRegionManager &MRMgr = SVB.getRegionManager();
  ASTContext &ACtx = SVB.getContext();

  // Compute the target region by exploring the construction context.
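  //
  // For illustration only (not from the original sources; the type and
  // function names below are made up), the contexts handled here typically
  // arise from code such as:
  //
  //   S s(1);                            // SimpleVariableKind
  //   struct T { S s; T() : s(1) {} };   // SimpleConstructorInitializerKind
  //   new S(1);                          // NewAllocatedObjectKind
  //   S make() { return S(1); }          // returned-value kinds
  //   consume(S(1));                     // ArgumentKind (temporary argument)
  //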
130 if (CC) { 131 switch (CC->getKind()) { 132 case ConstructionContext::CXX17ElidedCopyVariableKind: 133 case ConstructionContext::SimpleVariableKind: { 134 const auto *DSCC = cast<VariableConstructionContext>(CC); 135 const auto *DS = DSCC->getDeclStmt(); 136 const auto *Var = cast<VarDecl>(DS->getSingleDecl()); 137 QualType Ty = Var->getType(); 138 return makeElementRegion(State, State->getLValue(Var, LCtx), Ty, 139 CallOpts.IsArrayCtorOrDtor, Idx); 140 } 141 case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind: 142 case ConstructionContext::SimpleConstructorInitializerKind: { 143 const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC); 144 const auto *Init = ICC->getCXXCtorInitializer(); 145 const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl()); 146 Loc ThisPtr = SVB.getCXXThis(CurCtor, LCtx->getStackFrame()); 147 SVal ThisVal = State->getSVal(ThisPtr); 148 if (Init->isBaseInitializer()) { 149 const auto *ThisReg = cast<SubRegion>(ThisVal.getAsRegion()); 150 const CXXRecordDecl *BaseClass = 151 Init->getBaseClass()->getAsCXXRecordDecl(); 152 const auto *BaseReg = 153 MRMgr.getCXXBaseObjectRegion(BaseClass, ThisReg, 154 Init->isBaseVirtual()); 155 return SVB.makeLoc(BaseReg); 156 } 157 if (Init->isDelegatingInitializer()) 158 return ThisVal; 159 160 const ValueDecl *Field; 161 SVal FieldVal; 162 if (Init->isIndirectMemberInitializer()) { 163 Field = Init->getIndirectMember(); 164 FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal); 165 } else { 166 Field = Init->getMember(); 167 FieldVal = State->getLValue(Init->getMember(), ThisVal); 168 } 169 170 QualType Ty = Field->getType(); 171 return makeElementRegion(State, FieldVal, Ty, CallOpts.IsArrayCtorOrDtor, 172 Idx); 173 } 174 case ConstructionContext::NewAllocatedObjectKind: { 175 if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { 176 const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC); 177 const auto *NE = NECC->getCXXNewExpr(); 178 SVal V = *getObjectUnderConstruction(State, NE, LCtx); 179 if (const SubRegion *MR = 180 dyn_cast_or_null<SubRegion>(V.getAsRegion())) { 181 if (NE->isArray()) { 182 CallOpts.IsArrayCtorOrDtor = true; 183 184 auto Ty = NE->getType()->getPointeeType(); 185 while (const auto *AT = getContext().getAsArrayType(Ty)) 186 Ty = AT->getElementType(); 187 188 auto R = MRMgr.getElementRegion(Ty, svalBuilder.makeArrayIndex(Idx), 189 MR, SVB.getContext()); 190 191 return loc::MemRegionVal(R); 192 } 193 return V; 194 } 195 // TODO: Detect when the allocator returns a null pointer. 196 // Constructor shall not be called in this case. 197 } 198 break; 199 } 200 case ConstructionContext::SimpleReturnedValueKind: 201 case ConstructionContext::CXX17ElidedCopyReturnedValueKind: { 202 // The temporary is to be managed by the parent stack frame. 203 // So build it in the parent stack frame if we're not in the 204 // top frame of the analysis. 205 const StackFrameContext *SFC = LCtx->getStackFrame(); 206 if (const LocationContext *CallerLCtx = SFC->getParent()) { 207 auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()] 208 .getAs<CFGCXXRecordTypedCall>(); 209 if (!RTC) { 210 // We were unable to find the correct construction context for the 211 // call in the parent stack frame. This is equivalent to not being 212 // able to find construction context at all. 213 break; 214 } 215 if (isa<BlockInvocationContext>(CallerLCtx)) { 216 // Unwrap block invocation contexts. They're mostly part of 217 // the current stack frame. 
          CallerLCtx = CallerLCtx->getParent();
          assert(!isa<BlockInvocationContext>(CallerLCtx));
        }

        NodeBuilderContext CallerBldrCtx(getCoreEngine(),
                                         SFC->getCallSiteBlock(), CallerLCtx);
        return computeObjectUnderConstruction(
            cast<Expr>(SFC->getCallSite()), State, &CallerBldrCtx, CallerLCtx,
            RTC->getConstructionContext(), CallOpts);
      } else {
        // We are on the top frame of the analysis. We do not know where the
        // object is returned to. Conjure a symbolic region for the return
        // value.
        // TODO: We probably need a new MemRegion kind to represent the storage
        // of that SymbolicRegion, so that we could produce a fancy symbol
        // instead of an anonymous conjured symbol.
        // TODO: Do we need to track the region to avoid having it dead
        // too early? It does die too early, at least in C++17, but because
        // putting anything into a SymbolicRegion causes an immediate escape,
        // it doesn't cause any leak false positives.
        const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
        // Make sure that this doesn't coincide with any other symbol
        // conjured for the returned expression.
        static const int TopLevelSymRegionTag = 0;
        const Expr *RetE = RCC->getReturnStmt()->getRetValue();
        assert(RetE && "Void returns should not have a construction context");
        QualType ReturnTy = RetE->getType();
        QualType RegionTy = ACtx.getPointerType(ReturnTy);
        return SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC, RegionTy,
                                    currBldrCtx->blockCount());
      }
      llvm_unreachable("Unhandled return value construction context!");
    }
    case ConstructionContext::ElidedTemporaryObjectKind: {
      assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);

      // Support pre-C++17 copy elision. We'll have the elidable copy
      // constructor in the AST and in the CFG, but we'll skip it
      // and construct directly into the final object. This call
      // also sets the CallOpts flags for us.
      // If the elided copy/move constructor is not supported, there's still
      // benefit in trying to model the non-elided constructor.
      // Stash our state before trying to elide, as it'll get overwritten.
      ProgramStateRef PreElideState = State;
      EvalCallOptions PreElideCallOpts = CallOpts;

      SVal V = computeObjectUnderConstruction(
          TCC->getConstructorAfterElision(), State, BldrCtx, LCtx,
          TCC->getConstructionContextAfterElision(), CallOpts);

      // FIXME: This definition of "copy elision has not failed" is unreliable.
      // It doesn't indicate that the constructor will actually be inlined
      // later; this is still up to evalCall() to decide.
      if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return V;

      // Copy elision failed. Revert the changes and proceed as if we have
      // a simple temporary.
      CallOpts = PreElideCallOpts;
      CallOpts.IsElidableCtorThatHasNotBeenElided = true;
      [[fallthrough]];
    }
    case ConstructionContext::SimpleTemporaryObjectKind: {
      const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();

      CallOpts.IsTemporaryCtorOrDtor = true;
      if (MTE) {
        if (const ValueDecl *VD = MTE->getExtendingDecl()) {
          assert(MTE->getStorageDuration() != SD_FullExpression);
          if (!VD->getType()->isReferenceType()) {
            // We're lifetime-extended by a surrounding aggregate.
290 // Automatic destructors aren't quite working in this case 291 // on the CFG side. We should warn the caller about that. 292 // FIXME: Is there a better way to retrieve this information from 293 // the MaterializeTemporaryExpr? 294 CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true; 295 } 296 } 297 298 if (MTE->getStorageDuration() == SD_Static || 299 MTE->getStorageDuration() == SD_Thread) 300 return loc::MemRegionVal(MRMgr.getCXXStaticTempObjectRegion(E)); 301 } 302 303 return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)); 304 } 305 case ConstructionContext::LambdaCaptureKind: { 306 CallOpts.IsTemporaryCtorOrDtor = true; 307 308 const auto *LCC = cast<LambdaCaptureConstructionContext>(CC); 309 310 SVal Base = loc::MemRegionVal( 311 MRMgr.getCXXTempObjectRegion(LCC->getInitializer(), LCtx)); 312 313 const auto *CE = dyn_cast_or_null<CXXConstructExpr>(E); 314 if (getIndexOfElementToConstruct(State, CE, LCtx)) { 315 CallOpts.IsArrayCtorOrDtor = true; 316 Base = State->getLValue(E->getType(), svalBuilder.makeArrayIndex(Idx), 317 Base); 318 } 319 320 return Base; 321 } 322 case ConstructionContext::ArgumentKind: { 323 // Arguments are technically temporaries. 324 CallOpts.IsTemporaryCtorOrDtor = true; 325 326 const auto *ACC = cast<ArgumentConstructionContext>(CC); 327 const Expr *E = ACC->getCallLikeExpr(); 328 unsigned Idx = ACC->getIndex(); 329 330 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 331 auto getArgLoc = [&](CallEventRef<> Caller) -> std::optional<SVal> { 332 const LocationContext *FutureSFC = 333 Caller->getCalleeStackFrame(BldrCtx->blockCount()); 334 // Return early if we are unable to reliably foresee 335 // the future stack frame. 336 if (!FutureSFC) 337 return std::nullopt; 338 339 // This should be equivalent to Caller->getDecl() for now, but 340 // FutureSFC->getDecl() is likely to support better stuff (like 341 // virtual functions) earlier. 342 const Decl *CalleeD = FutureSFC->getDecl(); 343 344 // FIXME: Support for variadic arguments is not implemented here yet. 345 if (CallEvent::isVariadic(CalleeD)) 346 return std::nullopt; 347 348 // Operator arguments do not correspond to operator parameters 349 // because this-argument is implemented as a normal argument in 350 // operator call expressions but not in operator declarations. 351 const TypedValueRegion *TVR = Caller->getParameterLocation( 352 *Caller->getAdjustedParameterIndex(Idx), BldrCtx->blockCount()); 353 if (!TVR) 354 return std::nullopt; 355 356 return loc::MemRegionVal(TVR); 357 }; 358 359 if (const auto *CE = dyn_cast<CallExpr>(E)) { 360 CallEventRef<> Caller = CEMgr.getSimpleCall(CE, State, LCtx); 361 if (std::optional<SVal> V = getArgLoc(Caller)) 362 return *V; 363 else 364 break; 365 } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) { 366 // Don't bother figuring out the target region for the future 367 // constructor because we won't need it. 368 CallEventRef<> Caller = 369 CEMgr.getCXXConstructorCall(CCE, /*Target=*/nullptr, State, LCtx); 370 if (std::optional<SVal> V = getArgLoc(Caller)) 371 return *V; 372 else 373 break; 374 } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) { 375 CallEventRef<> Caller = CEMgr.getObjCMethodCall(ME, State, LCtx); 376 if (std::optional<SVal> V = getArgLoc(Caller)) 377 return *V; 378 else 379 break; 380 } 381 } 382 } // switch (CC->getKind()) 383 } 384 385 // If we couldn't find an existing region to construct into, assume we're 386 // constructing a temporary. Notify the caller of our failure. 
387 CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; 388 return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)); 389 } 390 391 ProgramStateRef ExprEngine::updateObjectsUnderConstruction( 392 SVal V, const Expr *E, ProgramStateRef State, const LocationContext *LCtx, 393 const ConstructionContext *CC, const EvalCallOptions &CallOpts) { 394 if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) { 395 // Sounds like we failed to find the target region and therefore 396 // copy elision failed. There's nothing we can do about it here. 397 return State; 398 } 399 400 // See if we're constructing an existing region by looking at the 401 // current construction context. 402 assert(CC && "Computed target region without construction context?"); 403 switch (CC->getKind()) { 404 case ConstructionContext::CXX17ElidedCopyVariableKind: 405 case ConstructionContext::SimpleVariableKind: { 406 const auto *DSCC = cast<VariableConstructionContext>(CC); 407 return addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, V); 408 } 409 case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind: 410 case ConstructionContext::SimpleConstructorInitializerKind: { 411 const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC); 412 const auto *Init = ICC->getCXXCtorInitializer(); 413 // Base and delegating initializers handled above 414 assert(Init->isAnyMemberInitializer() && 415 "Base and delegating initializers should have been handled by" 416 "computeObjectUnderConstruction()"); 417 return addObjectUnderConstruction(State, Init, LCtx, V); 418 } 419 case ConstructionContext::NewAllocatedObjectKind: { 420 return State; 421 } 422 case ConstructionContext::SimpleReturnedValueKind: 423 case ConstructionContext::CXX17ElidedCopyReturnedValueKind: { 424 const StackFrameContext *SFC = LCtx->getStackFrame(); 425 const LocationContext *CallerLCtx = SFC->getParent(); 426 if (!CallerLCtx) { 427 // No extra work is necessary in top frame. 428 return State; 429 } 430 431 auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()] 432 .getAs<CFGCXXRecordTypedCall>(); 433 assert(RTC && "Could not have had a target region without it"); 434 if (isa<BlockInvocationContext>(CallerLCtx)) { 435 // Unwrap block invocation contexts. They're mostly part of 436 // the current stack frame. 437 CallerLCtx = CallerLCtx->getParent(); 438 assert(!isa<BlockInvocationContext>(CallerLCtx)); 439 } 440 441 return updateObjectsUnderConstruction(V, 442 cast<Expr>(SFC->getCallSite()), State, CallerLCtx, 443 RTC->getConstructionContext(), CallOpts); 444 } 445 case ConstructionContext::ElidedTemporaryObjectKind: { 446 assert(AMgr.getAnalyzerOptions().ShouldElideConstructors); 447 if (!CallOpts.IsElidableCtorThatHasNotBeenElided) { 448 const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC); 449 State = updateObjectsUnderConstruction( 450 V, TCC->getConstructorAfterElision(), State, LCtx, 451 TCC->getConstructionContextAfterElision(), CallOpts); 452 453 // Remember that we've elided the constructor. 454 State = addObjectUnderConstruction( 455 State, TCC->getConstructorAfterElision(), LCtx, V); 456 457 // Remember that we've elided the destructor. 458 if (const auto *BTE = TCC->getCXXBindTemporaryExpr()) 459 State = elideDestructor(State, BTE, LCtx); 460 461 // Instead of materialization, shamelessly return 462 // the final object destination. 
      if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
        State = addObjectUnderConstruction(State, MTE, LCtx, V);

      return State;
    }
    // If we decided not to elide the constructor, proceed as if
    // it's a simple temporary.
    [[fallthrough]];
  }
  case ConstructionContext::SimpleTemporaryObjectKind: {
    const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
    if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
      State = addObjectUnderConstruction(State, MTE, LCtx, V);

    return State;
  }
  case ConstructionContext::LambdaCaptureKind: {
    const auto *LCC = cast<LambdaCaptureConstructionContext>(CC);

    // If we capture an array, we want to store the super region, not a
    // sub-region.
    if (const auto *EL = dyn_cast_or_null<ElementRegion>(V.getAsRegion()))
      V = loc::MemRegionVal(EL->getSuperRegion());

    return addObjectUnderConstruction(
        State, {LCC->getLambdaExpr(), LCC->getIndex()}, LCtx, V);
  }
  case ConstructionContext::ArgumentKind: {
    const auto *ACC = cast<ArgumentConstructionContext>(CC);
    if (const auto *BTE = ACC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    return addObjectUnderConstruction(
        State, {ACC->getCallLikeExpr(), ACC->getIndex()}, LCtx, V);
  }
  }
  llvm_unreachable("Unhandled construction context!");
}

static ProgramStateRef
bindRequiredArrayElementToEnvironment(ProgramStateRef State,
                                      const ArrayInitLoopExpr *AILE,
                                      const LocationContext *LCtx, SVal Idx) {
  // The ctor in this case is guaranteed to be a copy ctor, otherwise we hit a
  // compile time error.
  //
  //  -ArrayInitLoopExpr                <-- we're here
  //  |-OpaqueValueExpr
  //  | `-DeclRefExpr                   <-- match this
  //  `-CXXConstructExpr
  //    `-ImplicitCastExpr
  //      `-ArraySubscriptExpr
  //        |-ImplicitCastExpr
  //        | `-OpaqueValueExpr
  //        |   `-DeclRefExpr
  //        `-ArrayInitIndexExpr
  //
  // The resulting expression might look like the one below in an implicit
  // copy/move ctor.
  //
  //   ArrayInitLoopExpr                <-- we're here
  //   |-OpaqueValueExpr
  //   | `-MemberExpr                   <-- match this
  //   |  (`-CXXStaticCastExpr)         <-- move ctor only
  //   |   `-DeclRefExpr
  //   `-CXXConstructExpr
  //     `-ArraySubscriptExpr
  //       |-ImplicitCastExpr
  //       | `-OpaqueValueExpr
  //       |   `-MemberExpr
  //       |     `-DeclRefExpr
  //       `-ArrayInitIndexExpr
  //
  // The resulting expression for a multidimensional array.
  //  ArrayInitLoopExpr                 <-- we're here
  //  |-OpaqueValueExpr
  //  | `-DeclRefExpr                   <-- match this
  //  `-ArrayInitLoopExpr
  //    |-OpaqueValueExpr
  //    | `-ArraySubscriptExpr
  //    |   |-ImplicitCastExpr
  //    |   | `-OpaqueValueExpr
  //    |   |   `-DeclRefExpr
  //    |   `-ArrayInitIndexExpr
  //    `-CXXConstructExpr              <-- extract this
  //      ` ...

  const auto *OVESrc = AILE->getCommonExpr()->getSourceExpr();

  // HACK: There is no way we can put the index of the array element into the
  // CFG unless we unroll the loop, so we manually select and bind the required
  // parameter to the environment.
558 const auto *CE = 559 cast<CXXConstructExpr>(extractElementInitializerFromNestedAILE(AILE)); 560 561 SVal Base = UnknownVal(); 562 if (const auto *ME = dyn_cast<MemberExpr>(OVESrc)) 563 Base = State->getSVal(ME, LCtx); 564 else if (const auto *DRE = dyn_cast<DeclRefExpr>(OVESrc)) 565 Base = State->getLValue(cast<VarDecl>(DRE->getDecl()), LCtx); 566 else 567 llvm_unreachable("ArrayInitLoopExpr contains unexpected source expression"); 568 569 SVal NthElem = State->getLValue(CE->getType(), Idx, Base); 570 571 return State->BindExpr(CE->getArg(0), LCtx, NthElem); 572 } 573 574 void ExprEngine::handleConstructor(const Expr *E, 575 ExplodedNode *Pred, 576 ExplodedNodeSet &destNodes) { 577 const auto *CE = dyn_cast<CXXConstructExpr>(E); 578 const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(E); 579 assert(CE || CIE); 580 581 const LocationContext *LCtx = Pred->getLocationContext(); 582 ProgramStateRef State = Pred->getState(); 583 584 SVal Target = UnknownVal(); 585 586 if (CE) { 587 if (std::optional<SVal> ElidedTarget = 588 getObjectUnderConstruction(State, CE, LCtx)) { 589 // We've previously modeled an elidable constructor by pretending that 590 // it in fact constructs into the correct target. This constructor can 591 // therefore be skipped. 592 Target = *ElidedTarget; 593 StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx); 594 State = finishObjectConstruction(State, CE, LCtx); 595 if (auto L = Target.getAs<Loc>()) 596 State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType())); 597 Bldr.generateNode(CE, Pred, State); 598 return; 599 } 600 } 601 602 EvalCallOptions CallOpts; 603 auto C = getCurrentCFGElement().getAs<CFGConstructor>(); 604 assert(C || getCurrentCFGElement().getAs<CFGStmt>()); 605 const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr; 606 607 const CXXConstructExpr::ConstructionKind CK = 608 CE ? CE->getConstructionKind() : CIE->getConstructionKind(); 609 switch (CK) { 610 case CXXConstructExpr::CK_Complete: { 611 // Inherited constructors are always base class constructors. 612 assert(CE && !CIE && "A complete constructor is inherited?!"); 613 614 // If the ctor is part of an ArrayInitLoopExpr, we want to handle it 615 // differently. 616 auto *AILE = CC ? CC->getArrayInitLoop() : nullptr; 617 618 unsigned Idx = 0; 619 if (CE->getType()->isArrayType() || AILE) { 620 621 auto isZeroSizeArray = [&] { 622 uint64_t Size = 1; 623 624 if (const auto *CAT = dyn_cast<ConstantArrayType>(CE->getType())) 625 Size = getContext().getConstantArrayElementCount(CAT); 626 else if (AILE) 627 Size = getContext().getArrayInitLoopExprElementCount(AILE); 628 629 return Size == 0; 630 }; 631 632 // No element construction will happen in a 0 size array. 633 if (isZeroSizeArray()) { 634 StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx); 635 static SimpleProgramPointTag T{"ExprEngine", 636 "Skipping 0 size array construction"}; 637 Bldr.generateNode(CE, Pred, State, &T); 638 return; 639 } 640 641 Idx = getIndexOfElementToConstruct(State, CE, LCtx).value_or(0u); 642 State = setIndexOfElementToConstruct(State, CE, LCtx, Idx + 1); 643 } 644 645 if (AILE) { 646 // Only set this once even though we loop through it multiple times. 647 if (!getPendingInitLoop(State, CE, LCtx)) 648 State = setPendingInitLoop( 649 State, CE, LCtx, 650 getContext().getArrayInitLoopExprElementCount(AILE)); 651 652 State = bindRequiredArrayElementToEnvironment( 653 State, AILE, LCtx, svalBuilder.makeArrayIndex(Idx)); 654 } 655 656 // The target region is found from construction context. 
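    // Illustrative sketch (not from the original sources; the names are made
    // up): both of the following reach the per-element logic above, with Idx
    // advancing by one each time this CFG element is visited:
    //
    //   S arr[3];                        // array of objects
    //
    //   struct Wrapper { S elems[3]; };
    //   Wrapper copy = orig;             // implicit copy ctor copies 'elems'
    //                                    // through an ArrayInitLoopExpr
    //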
657 std::tie(State, Target) = handleConstructionContext( 658 CE, State, currBldrCtx, LCtx, CC, CallOpts, Idx); 659 break; 660 } 661 case CXXConstructExpr::CK_VirtualBase: { 662 // Make sure we are not calling virtual base class initializers twice. 663 // Only the most-derived object should initialize virtual base classes. 664 const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>( 665 LCtx->getStackFrame()->getCallSite()); 666 assert( 667 (!OuterCtor || 668 OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Complete || 669 OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Delegating) && 670 ("This virtual base should have already been initialized by " 671 "the most derived class!")); 672 (void)OuterCtor; 673 [[fallthrough]]; 674 } 675 case CXXConstructExpr::CK_NonVirtualBase: 676 // In C++17, classes with non-virtual bases may be aggregates, so they would 677 // be initialized as aggregates without a constructor call, so we may have 678 // a base class constructed directly into an initializer list without 679 // having the derived-class constructor call on the previous stack frame. 680 // Initializer lists may be nested into more initializer lists that 681 // correspond to surrounding aggregate initializations. 682 // FIXME: For now this code essentially bails out. We need to find the 683 // correct target region and set it. 684 // FIXME: Instead of relying on the ParentMap, we should have the 685 // trigger-statement (InitListExpr in this case) passed down from CFG or 686 // otherwise always available during construction. 687 if (isa_and_nonnull<InitListExpr>(LCtx->getParentMap().getParent(E))) { 688 MemRegionManager &MRMgr = getSValBuilder().getRegionManager(); 689 Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)); 690 CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; 691 break; 692 } 693 [[fallthrough]]; 694 case CXXConstructExpr::CK_Delegating: { 695 const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl()); 696 Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor, 697 LCtx->getStackFrame()); 698 SVal ThisVal = State->getSVal(ThisPtr); 699 700 if (CK == CXXConstructExpr::CK_Delegating) { 701 Target = ThisVal; 702 } else { 703 // Cast to the base type. 704 bool IsVirtual = (CK == CXXConstructExpr::CK_VirtualBase); 705 SVal BaseVal = 706 getStoreManager().evalDerivedToBase(ThisVal, E->getType(), IsVirtual); 707 Target = BaseVal; 708 } 709 break; 710 } 711 } 712 713 if (State != Pred->getState()) { 714 static SimpleProgramPointTag T("ExprEngine", 715 "Prepare for object construction"); 716 ExplodedNodeSet DstPrepare; 717 StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx); 718 BldrPrepare.generateNode(E, Pred, State, &T, ProgramPoint::PreStmtKind); 719 assert(DstPrepare.size() <= 1); 720 if (DstPrepare.size() == 0) 721 return; 722 Pred = *BldrPrepare.begin(); 723 } 724 725 const MemRegion *TargetRegion = Target.getAsRegion(); 726 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 727 CallEventRef<> Call = 728 CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall( 729 CIE, TargetRegion, State, LCtx) 730 : (CallEventRef<>)CEMgr.getCXXConstructorCall( 731 CE, TargetRegion, State, LCtx); 732 733 ExplodedNodeSet DstPreVisit; 734 getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, E, *this); 735 736 ExplodedNodeSet PreInitialized; 737 if (CE) { 738 // FIXME: Is it possible and/or useful to do this before PreStmt? 
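    // Illustrative example (an assumption about typical inputs, not taken
    // from the original sources): value-initialization such as
    //
    //   struct S { int x; };
    //   S *p = new S();   // members would otherwise be left uninitialized
    //
    // is the kind of construct the zero-initialization handling below is
    // meant to cover.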
    StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
    for (ExplodedNodeSet::iterator I = DstPreVisit.begin(),
                                   E = DstPreVisit.end();
         I != E; ++I) {
      ProgramStateRef State = (*I)->getState();
      if (CE->requiresZeroInitialization()) {
        // FIXME: Once we properly handle constructors in new-expressions,
        // we'll need to invalidate the region before setting a default value,
        // to make sure there aren't any lingering bindings around. This
        // probably needs to happen regardless of whether or not the object is
        // zero-initialized to handle random fields of a placement-initialized
        // object picking up old bindings. We might only want to do it when we
        // need to, though.
        // FIXME: This isn't actually correct for arrays -- we need to zero-
        // initialize the entire array, not just the first element -- but our
        // handling of arrays everywhere else is weak as well, so this
        // shouldn't actually make things worse. Placement new makes this
        // tricky as well, since it's then possible to be initializing one
        // part of a multi-dimensional array.
        State = State->bindDefaultZero(Target, LCtx);
      }

      Bldr.generateNode(CE, *I, State, /*tag=*/nullptr,
                        ProgramPoint::PreStmtKind);
    }
  } else {
    PreInitialized = DstPreVisit;
  }

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
                                            *Call, *this);

  ExplodedNodeSet DstEvaluated;

  if (CE && CE->getConstructor()->isTrivial() &&
      CE->getConstructor()->isCopyOrMoveConstructor() &&
      !CallOpts.IsArrayCtorOrDtor) {
    StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);
    // FIXME: Handle other kinds of trivial constructors as well.
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      performTrivialCopy(Bldr, *I, *Call);

  } else {
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      getCheckerManager().runCheckersForEvalCall(DstEvaluated, *I, *Call, *this,
                                                 CallOpts);
  }

  // If the CFG was constructed without elements for temporary destructors
  // and the just-called constructor created a temporary object, then stop
  // exploration if the temporary object has a noreturn destructor.
  // This can lose coverage because the destructor, if it were present
  // in the CFG, would be called at the end of the full expression or
  // later (for life-time extended temporaries) -- but avoids infeasible
  // paths when no-return temporary destructors are used for assertions.
  ExplodedNodeSet DstEvaluatedPostProcessed;
  StmtNodeBuilder Bldr(DstEvaluated, DstEvaluatedPostProcessed, *currBldrCtx);
  const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
  if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
    if (llvm::isa_and_nonnull<CXXTempObjectRegion>(TargetRegion) &&
        cast<CXXConstructorDecl>(Call->getDecl())
            ->getParent()
            ->isAnyDestructorNoReturn()) {

      // If we've inlined the constructor, then DstEvaluated would be empty.
      // In this case we still want a sink, which could be implemented
      // in processCallExit. But we don't have that implemented at the moment,
      // so if you hit this assertion, see if you can avoid inlining
      // the respective constructor when the analyzer-config option
      // cfg-temporary-dtors is set to false.
      // Otherwise there's nothing wrong with inlining such a constructor.
      assert(!DstEvaluated.empty() &&
             "We should not have inlined this constructor!");

      for (ExplodedNode *N : DstEvaluated) {
        Bldr.generateSink(E, N, N->getState());
      }

      // There is no need to run the PostCall and PostStmt checker
      // callbacks because we just generated sinks on all nodes in the
      // frontier.
      return;
    }
  }

  ExplodedNodeSet DstPostArgumentCleanup;
  for (ExplodedNode *I : DstEvaluatedPostProcessed)
    finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);

  // If there were other constructors called for object-type arguments
  // of this constructor, clean them up.
  ExplodedNodeSet DstPostCall;
  getCheckerManager().runCheckersForPostCall(DstPostCall,
                                             DstPostArgumentCleanup,
                                             *Call, *this);
  getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, E, *this);
}

void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
                                       ExplodedNode *Pred,
                                       ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}

void ExprEngine::VisitCXXInheritedCtorInitExpr(
    const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred,
    ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}

void ExprEngine::VisitCXXDestructor(QualType ObjectType,
                                    const MemRegion *Dest,
                                    const Stmt *S,
                                    bool IsBaseDtor,
                                    ExplodedNode *Pred,
                                    ExplodedNodeSet &Dst,
                                    EvalCallOptions &CallOpts) {
  assert(S && "A destructor without a trigger!");
  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
  assert(RecordDecl && "Only CXXRecordDecls should have destructors");
  const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();
  // FIXME: There should always be a Decl, otherwise the destructor call
  // shouldn't have been added to the CFG in the first place.
  if (!DtorDecl) {
    // Skip the invalid destructor. We cannot simply return, because that
    // would interrupt the analysis.
    static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
    // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway.
    PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx, &T);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  if (!Dest) {
    // We're trying to destroy something that is not a region. This may happen
    // for a variety of reasons (unknown target region, concrete integer
    // instead of target region, etc.). The current code makes an attempt to
    // recover.
    // FIXME: We probably don't really need to recover when we're dealing
    // with concrete integers specifically.
884 CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; 885 if (const Expr *E = dyn_cast_or_null<Expr>(S)) { 886 Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext()); 887 } else { 888 static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor"); 889 NodeBuilder Bldr(Pred, Dst, *currBldrCtx); 890 Bldr.generateSink(Pred->getLocation().withTag(&T), 891 Pred->getState(), Pred); 892 return; 893 } 894 } 895 896 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 897 CallEventRef<CXXDestructorCall> Call = 898 CEMgr.getCXXDestructorCall(DtorDecl, S, Dest, IsBaseDtor, State, LCtx); 899 900 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), 901 Call->getSourceRange().getBegin(), 902 "Error evaluating destructor"); 903 904 ExplodedNodeSet DstPreCall; 905 getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, 906 *Call, *this); 907 908 ExplodedNodeSet DstInvalidated; 909 StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx); 910 for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end(); 911 I != E; ++I) 912 defaultEvalCall(Bldr, *I, *Call, CallOpts); 913 914 getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated, 915 *Call, *this); 916 } 917 918 void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE, 919 ExplodedNode *Pred, 920 ExplodedNodeSet &Dst) { 921 ProgramStateRef State = Pred->getState(); 922 const LocationContext *LCtx = Pred->getLocationContext(); 923 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), 924 CNE->getBeginLoc(), 925 "Error evaluating New Allocator Call"); 926 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 927 CallEventRef<CXXAllocatorCall> Call = 928 CEMgr.getCXXAllocatorCall(CNE, State, LCtx); 929 930 ExplodedNodeSet DstPreCall; 931 getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, 932 *Call, *this); 933 934 ExplodedNodeSet DstPostCall; 935 StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx); 936 for (ExplodedNode *I : DstPreCall) { 937 // FIXME: Provide evalCall for checkers? 938 defaultEvalCall(CallBldr, I, *Call); 939 } 940 // If the call is inlined, DstPostCall will be empty and we bail out now. 941 942 // Store return value of operator new() for future use, until the actual 943 // CXXNewExpr gets processed. 944 ExplodedNodeSet DstPostValue; 945 StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx); 946 for (ExplodedNode *I : DstPostCall) { 947 // FIXME: Because CNE serves as the "call site" for the allocator (due to 948 // lack of a better expression in the AST), the conjured return value symbol 949 // is going to be of the same type (C++ object pointer type). Technically 950 // this is not correct because the operator new's prototype always says that 951 // it returns a 'void *'. So we should change the type of the symbol, 952 // and then evaluate the cast over the symbolic pointer from 'void *' to 953 // the object pointer type. But without changing the symbol's type it 954 // is breaking too much to evaluate the no-op symbolic cast over it, so we 955 // skip it for now. 956 ProgramStateRef State = I->getState(); 957 SVal RetVal = State->getSVal(CNE, LCtx); 958 // [basic.stc.dynamic.allocation] (on the return value of an allocation 959 // function): 960 // "The order, contiguity, and initial value of storage allocated by 961 // successive calls to an allocation function are unspecified." 
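    // Illustrative example (not from the original sources): after
    //
    //   int *p = new int;   // no initializer
    //
    // reading *p before any write yields an undefined value, which is what
    // the default binding below models.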
962 State = State->bindDefaultInitial(RetVal, UndefinedVal{}, LCtx); 963 964 // If this allocation function is not declared as non-throwing, failures 965 // /must/ be signalled by exceptions, and thus the return value will never 966 // be NULL. -fno-exceptions does not influence this semantics. 967 // FIXME: GCC has a -fcheck-new option, which forces it to consider the case 968 // where new can return NULL. If we end up supporting that option, we can 969 // consider adding a check for it here. 970 // C++11 [basic.stc.dynamic.allocation]p3. 971 if (const FunctionDecl *FD = CNE->getOperatorNew()) { 972 QualType Ty = FD->getType(); 973 if (const auto *ProtoType = Ty->getAs<FunctionProtoType>()) 974 if (!ProtoType->isNothrow()) 975 State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true); 976 } 977 978 ValueBldr.generateNode( 979 CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal)); 980 } 981 982 ExplodedNodeSet DstPostPostCallCallback; 983 getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback, 984 DstPostValue, *Call, *this); 985 for (ExplodedNode *I : DstPostPostCallCallback) { 986 getCheckerManager().runCheckersForNewAllocator(*Call, Dst, I, *this); 987 } 988 } 989 990 void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred, 991 ExplodedNodeSet &Dst) { 992 // FIXME: Much of this should eventually migrate to CXXAllocatorCall. 993 // Also, we need to decide how allocators actually work -- they're not 994 // really part of the CXXNewExpr because they happen BEFORE the 995 // CXXConstructExpr subexpression. See PR12014 for some discussion. 996 997 unsigned blockCount = currBldrCtx->blockCount(); 998 const LocationContext *LCtx = Pred->getLocationContext(); 999 SVal symVal = UnknownVal(); 1000 FunctionDecl *FD = CNE->getOperatorNew(); 1001 1002 bool IsStandardGlobalOpNewFunction = 1003 FD->isReplaceableGlobalAllocationFunction(); 1004 1005 ProgramStateRef State = Pred->getState(); 1006 1007 // Retrieve the stored operator new() return value. 1008 if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { 1009 symVal = *getObjectUnderConstruction(State, CNE, LCtx); 1010 State = finishObjectConstruction(State, CNE, LCtx); 1011 } 1012 1013 // We assume all standard global 'operator new' functions allocate memory in 1014 // heap. We realize this is an approximation that might not correctly model 1015 // a custom global allocator. 1016 if (symVal.isUnknown()) { 1017 if (IsStandardGlobalOpNewFunction) 1018 symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount); 1019 else 1020 symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(), 1021 blockCount); 1022 } 1023 1024 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 1025 CallEventRef<CXXAllocatorCall> Call = 1026 CEMgr.getCXXAllocatorCall(CNE, State, LCtx); 1027 1028 if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { 1029 // Invalidate placement args. 1030 // FIXME: Once we figure out how we want allocators to work, 1031 // we should be using the usual pre-/(default-)eval-/post-call checkers 1032 // here. 1033 State = Call->invalidateRegions(blockCount); 1034 if (!State) 1035 return; 1036 1037 // If this allocation function is not declared as non-throwing, failures 1038 // /must/ be signalled by exceptions, and thus the return value will never 1039 // be NULL. -fno-exceptions does not influence this semantics. 1040 // FIXME: GCC has a -fcheck-new option, which forces it to consider the case 1041 // where new can return NULL. 
If we end up supporting that option, we can 1042 // consider adding a check for it here. 1043 // C++11 [basic.stc.dynamic.allocation]p3. 1044 if (const auto *ProtoType = FD->getType()->getAs<FunctionProtoType>()) 1045 if (!ProtoType->isNothrow()) 1046 if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>()) 1047 State = State->assume(*dSymVal, true); 1048 } 1049 1050 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1051 1052 SVal Result = symVal; 1053 1054 if (CNE->isArray()) { 1055 1056 if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) { 1057 // If each element is initialized by their default constructor, the field 1058 // values are properly placed inside the required region, however if an 1059 // initializer list is used, this doesn't happen automatically. 1060 auto *Init = CNE->getInitializer(); 1061 bool isInitList = isa_and_nonnull<InitListExpr>(Init); 1062 1063 QualType ObjTy = 1064 isInitList ? Init->getType() : CNE->getType()->getPointeeType(); 1065 const ElementRegion *EleReg = 1066 MRMgr.getElementRegion(ObjTy, svalBuilder.makeArrayIndex(0), NewReg, 1067 svalBuilder.getContext()); 1068 Result = loc::MemRegionVal(EleReg); 1069 1070 // If the array is list initialized, we bind the initializer list to the 1071 // memory region here, otherwise we would lose it. 1072 if (isInitList) { 1073 Bldr.takeNodes(Pred); 1074 Pred = Bldr.generateNode(CNE, Pred, State); 1075 1076 SVal V = State->getSVal(Init, LCtx); 1077 ExplodedNodeSet evaluated; 1078 evalBind(evaluated, CNE, Pred, Result, V, true); 1079 1080 Bldr.takeNodes(Pred); 1081 Bldr.addNodes(evaluated); 1082 1083 Pred = *evaluated.begin(); 1084 State = Pred->getState(); 1085 } 1086 } 1087 1088 State = State->BindExpr(CNE, Pred->getLocationContext(), Result); 1089 Bldr.generateNode(CNE, Pred, State); 1090 return; 1091 } 1092 1093 // FIXME: Once we have proper support for CXXConstructExprs inside 1094 // CXXNewExpr, we need to make sure that the constructed object is not 1095 // immediately invalidated here. (The placement call should happen before 1096 // the constructor call anyway.) 1097 if (FD->isReservedGlobalPlacementOperator()) { 1098 // Non-array placement new should always return the placement location. 1099 SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx); 1100 Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(), 1101 CNE->getPlacementArg(0)->getType()); 1102 } 1103 1104 // Bind the address of the object, then check to see if we cached out. 1105 State = State->BindExpr(CNE, LCtx, Result); 1106 ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State); 1107 if (!NewN) 1108 return; 1109 1110 // If the type is not a record, we won't have a CXXConstructExpr as an 1111 // initializer. Copy the value over. 
1112 if (const Expr *Init = CNE->getInitializer()) { 1113 if (!isa<CXXConstructExpr>(Init)) { 1114 assert(Bldr.getResults().size() == 1); 1115 Bldr.takeNodes(NewN); 1116 evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx), 1117 /*FirstInit=*/IsStandardGlobalOpNewFunction); 1118 } 1119 } 1120 } 1121 1122 void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE, 1123 ExplodedNode *Pred, ExplodedNodeSet &Dst) { 1124 1125 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 1126 CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall( 1127 CDE, Pred->getState(), Pred->getLocationContext()); 1128 1129 ExplodedNodeSet DstPreCall; 1130 getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, *Call, *this); 1131 ExplodedNodeSet DstPostCall; 1132 1133 if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { 1134 StmtNodeBuilder Bldr(DstPreCall, DstPostCall, *currBldrCtx); 1135 for (ExplodedNode *I : DstPreCall) { 1136 defaultEvalCall(Bldr, I, *Call); 1137 } 1138 } else { 1139 DstPostCall = DstPreCall; 1140 } 1141 getCheckerManager().runCheckersForPostCall(Dst, DstPostCall, *Call, *this); 1142 } 1143 1144 void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred, 1145 ExplodedNodeSet &Dst) { 1146 const VarDecl *VD = CS->getExceptionDecl(); 1147 if (!VD) { 1148 Dst.Add(Pred); 1149 return; 1150 } 1151 1152 const LocationContext *LCtx = Pred->getLocationContext(); 1153 SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(), 1154 currBldrCtx->blockCount()); 1155 ProgramStateRef state = Pred->getState(); 1156 state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx); 1157 1158 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1159 Bldr.generateNode(CS, Pred, state); 1160 } 1161 1162 void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred, 1163 ExplodedNodeSet &Dst) { 1164 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1165 1166 // Get the this object region from StoreManager. 1167 const LocationContext *LCtx = Pred->getLocationContext(); 1168 const MemRegion *R = 1169 svalBuilder.getRegionManager().getCXXThisRegion( 1170 getContext().getCanonicalType(TE->getType()), 1171 LCtx); 1172 1173 ProgramStateRef state = Pred->getState(); 1174 SVal V = state->getSVal(loc::MemRegionVal(R)); 1175 Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V)); 1176 } 1177 1178 void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred, 1179 ExplodedNodeSet &Dst) { 1180 const LocationContext *LocCtxt = Pred->getLocationContext(); 1181 1182 // Get the region of the lambda itself. 1183 const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion( 1184 LE, LocCtxt); 1185 SVal V = loc::MemRegionVal(R); 1186 1187 ProgramStateRef State = Pred->getState(); 1188 1189 // If we created a new MemRegion for the lambda, we should explicitly bind 1190 // the captures. 1191 unsigned Idx = 0; 1192 CXXRecordDecl::field_iterator CurField = LE->getLambdaClass()->field_begin(); 1193 for (LambdaExpr::const_capture_init_iterator i = LE->capture_init_begin(), 1194 e = LE->capture_init_end(); 1195 i != e; ++i, ++CurField, ++Idx) { 1196 FieldDecl *FieldForCapture = *CurField; 1197 SVal FieldLoc = State->getLValue(FieldForCapture, V); 1198 1199 SVal InitVal; 1200 if (!FieldForCapture->hasCapturedVLAType()) { 1201 const Expr *InitExpr = *i; 1202 1203 assert(InitExpr && "Capture missing initialization expression"); 1204 1205 // Capturing a 0 length array is a no-op, so we ignore it to get a more 1206 // accurate analysis. 
      // If it's not ignored, it would set the default binding of the lambda
      // to 'Unknown', which can lead to falsely detecting 'Uninitialized'
      // values as 'Unknown' and not reporting a warning.
      const auto FTy = FieldForCapture->getType();
      if (FTy->isConstantArrayType() &&
          getContext().getConstantArrayElementCount(
              getContext().getAsConstantArrayType(FTy)) == 0)
        continue;

      // With C++17 copy elision the InitExpr can be anything, so instead of
      // pattern-matching all cases, we simply check whether the current field
      // is under construction or not, regardless of what its InitExpr is.
      if (const auto OUC =
              getObjectUnderConstruction(State, {LE, Idx}, LocCtxt)) {
        InitVal = State->getSVal(OUC->getAsRegion());

        State = finishObjectConstruction(State, {LE, Idx}, LocCtxt);
      } else
        InitVal = State->getSVal(InitExpr, LocCtxt);

    } else {

      assert(!getObjectUnderConstruction(State, {LE, Idx}, LocCtxt) &&
             "VLA capture by value is a compile time error!");

      // The field stores the length of a captured variable-length array.
      // These captures don't have initialization expressions; instead we
      // get the length from the VLAType size expression.
      Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
      InitVal = State->getSVal(SizeExpr, LocCtxt);
    }

    State = State->bindLoc(FieldLoc, InitVal, LocCtxt);
  }

  // Decay the Loc into an RValue, because there might be a
  // MaterializeTemporaryExpr node above this one which expects the bound value
  // to be an RValue.
  SVal LambdaRVal = State->getSVal(R);

  ExplodedNodeSet Tmp;
  StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
  // FIXME: is this the right program point kind?
  Bldr.generateNode(LE, Pred,
                    State->BindExpr(LE, LocCtxt, LambdaRVal),
                    nullptr, ProgramPoint::PostLValueKind);

  // FIXME: Move all post/pre visits to ::Visit().
  getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this);
}
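
// Illustrative sketch (not part of the original sources; the names below are
// made up): the capture modeling in VisitLambdaExpr above covers, for example,
//
//   int n = 0;
//   int arr[2] = {1, 2};
//   auto l = [n, arr] { return n + arr[0]; };  // one field binding per capture
//
// as well as VLA captures, where the corresponding closure field stores the
// captured array's length rather than its contents.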