//===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the C++ expression evaluation engine.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/DeclCXX.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h"

using namespace clang;
using namespace ento;

void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  const Expr *tempExpr = ME->getSubExpr()->IgnoreParens();
  ProgramStateRef state = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME);
  Bldr.generateNode(ME, Pred, state);
}

// FIXME: This is the sort of code that should eventually live in a Core
// checker rather than as a special case in ExprEngine.
void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
                                    const CallEvent &Call) {
  SVal ThisVal;
  bool AlwaysReturnsLValue;
  const CXXRecordDecl *ThisRD = nullptr;
  if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
    assert(Ctor->getDecl()->isTrivial());
    assert(Ctor->getDecl()->isCopyOrMoveConstructor());
    ThisVal = Ctor->getCXXThisVal();
    ThisRD = Ctor->getDecl()->getParent();
    AlwaysReturnsLValue = false;
  } else {
    assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
    assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
           OO_Equal);
    ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
    ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
    AlwaysReturnsLValue = true;
  }

  assert(ThisRD);
  if (ThisRD->isEmpty()) {
    // Do nothing for empty classes. Otherwise it'd retrieve an UnknownVal
    // and bind it and RegionStore would think that the actual value
    // in this region at this offset is unknown.
    return;
  }

  const LocationContext *LCtx = Pred->getLocationContext();

  ExplodedNodeSet Dst;
  Bldr.takeNodes(Pred);

  SVal V = Call.getArgSVal(0);

  // If the value being copied is not unknown, load from its location to get
  // an aggregate rvalue.
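  // (An unknown or undefined value is bound as-is.)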
  if (Optional<Loc> L = V.getAs<Loc>())
    V = Pred->getState()->getSVal(*L);
  else
    assert(V.isUnknownOrUndef());

  const Expr *CallExpr = Call.getOriginExpr();
  evalBind(Dst, CallExpr, Pred, ThisVal, V, true);

  PostStmt PS(CallExpr, LCtx);
  for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end();
       I != E; ++I) {
    ProgramStateRef State = (*I)->getState();
    if (AlwaysReturnsLValue)
      State = State->BindExpr(CallExpr, LCtx, ThisVal);
    else
      State = bindReturnValue(Call, LCtx, State);
    Bldr.generateNode(PS, State, *I);
  }
}

SVal ExprEngine::makeElementRegion(ProgramStateRef State, SVal LValue,
                                   QualType &Ty, bool &IsArray, unsigned Idx) {
  SValBuilder &SVB = State->getStateManager().getSValBuilder();
  ASTContext &Ctx = SVB.getContext();

  if (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
    while (AT) {
      Ty = AT->getElementType();
      AT = dyn_cast<ArrayType>(AT->getElementType());
    }
    LValue = State->getLValue(Ty, SVB.makeArrayIndex(Idx), LValue);
    IsArray = true;
  }

  return LValue;
}

// When the prvalue is returned from the function (the kind is one of
// SimpleReturnedValueKind, CXX17ElidedCopyReturnedValueKind), its
// materialization happens in the context of the caller.
// We pass BldrCtx explicitly, as currBldrCtx always refers to the callee's
// context.
SVal ExprEngine::computeObjectUnderConstruction(
    const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx,
    const LocationContext *LCtx, const ConstructionContext *CC,
    EvalCallOptions &CallOpts, unsigned Idx) {

  SValBuilder &SVB = getSValBuilder();
  MemRegionManager &MRMgr = SVB.getRegionManager();
  ASTContext &ACtx = SVB.getContext();

  // Compute the target region by exploring the construction context.
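  // For example, given
  //   S s(1);     // SimpleVariableKind: construct directly into 's'
  //   new S(1);   // NewAllocatedObjectKind: construct into the allocated region
  //   foo(S(1));  // ArgumentKind: construct into the future parameter region
  // the construction context tells us which region the object should be
  // constructed into, i.e. what the constructor's this-value should be.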
  if (CC) {
    switch (CC->getKind()) {
    case ConstructionContext::CXX17ElidedCopyVariableKind:
    case ConstructionContext::SimpleVariableKind: {
      const auto *DSCC = cast<VariableConstructionContext>(CC);
      const auto *DS = DSCC->getDeclStmt();
      const auto *Var = cast<VarDecl>(DS->getSingleDecl());
      QualType Ty = Var->getType();
      return makeElementRegion(State, State->getLValue(Var, LCtx), Ty,
                               CallOpts.IsArrayCtorOrDtor, Idx);
    }
    case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
    case ConstructionContext::SimpleConstructorInitializerKind: {
      const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
      const auto *Init = ICC->getCXXCtorInitializer();
      const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
      Loc ThisPtr = SVB.getCXXThis(CurCtor, LCtx->getStackFrame());
      SVal ThisVal = State->getSVal(ThisPtr);
      if (Init->isBaseInitializer()) {
        const auto *ThisReg = cast<SubRegion>(ThisVal.getAsRegion());
        const CXXRecordDecl *BaseClass =
            Init->getBaseClass()->getAsCXXRecordDecl();
        const auto *BaseReg =
            MRMgr.getCXXBaseObjectRegion(BaseClass, ThisReg,
                                         Init->isBaseVirtual());
        return SVB.makeLoc(BaseReg);
      }
      if (Init->isDelegatingInitializer())
        return ThisVal;

      const ValueDecl *Field;
      SVal FieldVal;
      if (Init->isIndirectMemberInitializer()) {
        Field = Init->getIndirectMember();
        FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal);
      } else {
        Field = Init->getMember();
        FieldVal = State->getLValue(Init->getMember(), ThisVal);
      }

      QualType Ty = Field->getType();
      return makeElementRegion(State, FieldVal, Ty, CallOpts.IsArrayCtorOrDtor,
                               Idx);
    }
    case ConstructionContext::NewAllocatedObjectKind: {
      if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
        const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC);
        const auto *NE = NECC->getCXXNewExpr();
        SVal V = *getObjectUnderConstruction(State, NE, LCtx);
        if (const SubRegion *MR =
                dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
          if (NE->isArray()) {
            CallOpts.IsArrayCtorOrDtor = true;

            auto Ty = NE->getType()->getPointeeType();
            while (const auto *AT = getContext().getAsArrayType(Ty))
              Ty = AT->getElementType();

            auto R = MRMgr.getElementRegion(Ty, svalBuilder.makeArrayIndex(Idx),
                                            MR, SVB.getContext());

            return loc::MemRegionVal(R);
          }
          return V;
        }
        // TODO: Detect when the allocator returns a null pointer.
        // The constructor shall not be called in this case.
      }
      break;
    }
    case ConstructionContext::SimpleReturnedValueKind:
    case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
      // The temporary is to be managed by the parent stack frame.
      // So build it in the parent stack frame if we're not in the
      // top frame of the analysis.
      const StackFrameContext *SFC = LCtx->getStackFrame();
      if (const LocationContext *CallerLCtx = SFC->getParent()) {
        auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                       .getAs<CFGCXXRecordTypedCall>();
        if (!RTC) {
          // We were unable to find the correct construction context for the
          // call in the parent stack frame. This is equivalent to not being
          // able to find a construction context at all.
          break;
        }
        if (isa<BlockInvocationContext>(CallerLCtx)) {
          // Unwrap block invocation contexts. They're mostly part of
          // the current stack frame.
          CallerLCtx = CallerLCtx->getParent();
          assert(!isa<BlockInvocationContext>(CallerLCtx));
        }

        NodeBuilderContext CallerBldrCtx(getCoreEngine(),
                                         SFC->getCallSiteBlock(), CallerLCtx);
        return computeObjectUnderConstruction(
            cast<Expr>(SFC->getCallSite()), State, &CallerBldrCtx, CallerLCtx,
            RTC->getConstructionContext(), CallOpts);
      } else {
        // We are on the top frame of the analysis. We do not know where the
        // object is returned to. Conjure a symbolic region for the return
        // value.
        // TODO: We probably need a new MemRegion kind to represent the storage
        // of that SymbolicRegion, so that we could produce a fancy symbol
        // instead of an anonymous conjured symbol.
        // TODO: Do we need to track the region to avoid having it dead
        // too early? It does die too early, at least in C++17, but because
        // putting anything into a SymbolicRegion causes an immediate escape,
        // it doesn't cause any leak false positives.
        const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
        // Make sure that this doesn't coincide with any other symbol
        // conjured for the returned expression.
        static const int TopLevelSymRegionTag = 0;
        const Expr *RetE = RCC->getReturnStmt()->getRetValue();
        assert(RetE && "Void returns should not have a construction context");
        QualType ReturnTy = RetE->getType();
        QualType RegionTy = ACtx.getPointerType(ReturnTy);
        return SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC, RegionTy,
                                    currBldrCtx->blockCount());
      }
      llvm_unreachable("Unhandled return value construction context!");
    }
    case ConstructionContext::ElidedTemporaryObjectKind: {
      assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);

      // Support pre-C++17 copy elision. We'll have the elidable copy
      // constructor in the AST and in the CFG, but we'll skip it
      // and construct directly into the final object. This call
      // also sets the CallOpts flags for us.
      // If the elided copy/move constructor is not supported, there's still
      // benefit in trying to model the non-elided constructor.
      // Stash our state before trying to elide, as it'll get overwritten.
      ProgramStateRef PreElideState = State;
      EvalCallOptions PreElideCallOpts = CallOpts;

      SVal V = computeObjectUnderConstruction(
          TCC->getConstructorAfterElision(), State, BldrCtx, LCtx,
          TCC->getConstructionContextAfterElision(), CallOpts);

      // FIXME: This definition of "copy elision has not failed" is unreliable.
      // It doesn't indicate that the constructor will actually be inlined
      // later; this is still up to evalCall() to decide.
      if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return V;

      // Copy elision failed. Revert the changes and proceed as if we have
      // a simple temporary.
      CallOpts = PreElideCallOpts;
      CallOpts.IsElidableCtorThatHasNotBeenElided = true;
      [[fallthrough]];
    }
    case ConstructionContext::SimpleTemporaryObjectKind: {
      const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();

      CallOpts.IsTemporaryCtorOrDtor = true;
      if (MTE) {
        if (const ValueDecl *VD = MTE->getExtendingDecl()) {
          assert(MTE->getStorageDuration() != SD_FullExpression);
          if (!VD->getType()->isReferenceType()) {
            // We're lifetime-extended by a surrounding aggregate.
            // Automatic destructors aren't quite working in this case
            // on the CFG side. We should warn the caller about that.
            // FIXME: Is there a better way to retrieve this information from
            // the MaterializeTemporaryExpr?
            CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true;
          }
        }

        if (MTE->getStorageDuration() == SD_Static ||
            MTE->getStorageDuration() == SD_Thread)
          return loc::MemRegionVal(MRMgr.getCXXStaticTempObjectRegion(E));
      }

      return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
    }
    case ConstructionContext::LambdaCaptureKind: {
      CallOpts.IsTemporaryCtorOrDtor = true;

      const auto *LCC = cast<LambdaCaptureConstructionContext>(CC);

      SVal Base = loc::MemRegionVal(
          MRMgr.getCXXTempObjectRegion(LCC->getInitializer(), LCtx));

      const auto *CE = dyn_cast_or_null<CXXConstructExpr>(E);
      if (getIndexOfElementToConstruct(State, CE, LCtx)) {
        CallOpts.IsArrayCtorOrDtor = true;
        Base = State->getLValue(E->getType(), svalBuilder.makeArrayIndex(Idx),
                                Base);
      }

      return Base;
    }
    case ConstructionContext::ArgumentKind: {
      // Arguments are technically temporaries.
      CallOpts.IsTemporaryCtorOrDtor = true;

      const auto *ACC = cast<ArgumentConstructionContext>(CC);
      const Expr *E = ACC->getCallLikeExpr();
      unsigned Idx = ACC->getIndex();

      CallEventManager &CEMgr = getStateManager().getCallEventManager();
      auto getArgLoc = [&](CallEventRef<> Caller) -> Optional<SVal> {
        const LocationContext *FutureSFC =
            Caller->getCalleeStackFrame(BldrCtx->blockCount());
        // Return early if we are unable to reliably foresee
        // the future stack frame.
        if (!FutureSFC)
          return None;

        // This should be equivalent to Caller->getDecl() for now, but
        // FutureSFC->getDecl() is likely to support better stuff (like
        // virtual functions) earlier.
        const Decl *CalleeD = FutureSFC->getDecl();

        // FIXME: Support for variadic arguments is not implemented here yet.
        if (CallEvent::isVariadic(CalleeD))
          return None;

        // Operator arguments do not correspond to operator parameters
        // because the this-argument is implemented as a normal argument in
        // operator call expressions but not in operator declarations.
        const TypedValueRegion *TVR = Caller->getParameterLocation(
            *Caller->getAdjustedParameterIndex(Idx), BldrCtx->blockCount());
        if (!TVR)
          return None;

        return loc::MemRegionVal(TVR);
      };

      if (const auto *CE = dyn_cast<CallExpr>(E)) {
        CallEventRef<> Caller = CEMgr.getSimpleCall(CE, State, LCtx);
        if (Optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) {
        // Don't bother figuring out the target region for the future
        // constructor because we won't need it.
        CallEventRef<> Caller =
            CEMgr.getCXXConstructorCall(CCE, /*Target=*/nullptr, State, LCtx);
        if (Optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) {
        CallEventRef<> Caller = CEMgr.getObjCMethodCall(ME, State, LCtx);
        if (Optional<SVal> V = getArgLoc(Caller))
          return *V;
        else
          break;
      }
    }
    } // switch (CC->getKind())
  }

  // If we couldn't find an existing region to construct into, assume we're
  // constructing a temporary. Notify the caller of our failure.
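  // (updateObjectsUnderConstruction() checks this flag and deliberately
  // records nothing for such constructors.)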
  CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
}

ProgramStateRef ExprEngine::updateObjectsUnderConstruction(
    SVal V, const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
    const ConstructionContext *CC, const EvalCallOptions &CallOpts) {
  if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) {
    // Sounds like we failed to find the target region and therefore
    // copy elision failed. There's nothing we can do about it here.
    return State;
  }

  // See if we're constructing an existing region by looking at the
  // current construction context.
  assert(CC && "Computed target region without construction context?");
  switch (CC->getKind()) {
  case ConstructionContext::CXX17ElidedCopyVariableKind:
  case ConstructionContext::SimpleVariableKind: {
    const auto *DSCC = cast<VariableConstructionContext>(CC);
    return addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, V);
  }
  case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
  case ConstructionContext::SimpleConstructorInitializerKind: {
    const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
    const auto *Init = ICC->getCXXCtorInitializer();
    // Base and delegating initializers are handled above.
    assert(Init->isAnyMemberInitializer() &&
           "Base and delegating initializers should have been handled by "
           "computeObjectUnderConstruction()");
    return addObjectUnderConstruction(State, Init, LCtx, V);
  }
  case ConstructionContext::NewAllocatedObjectKind: {
    return State;
  }
  case ConstructionContext::SimpleReturnedValueKind:
  case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
    const StackFrameContext *SFC = LCtx->getStackFrame();
    const LocationContext *CallerLCtx = SFC->getParent();
    if (!CallerLCtx) {
      // No extra work is necessary in the top frame.
      return State;
    }

    auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                   .getAs<CFGCXXRecordTypedCall>();
    assert(RTC && "Could not have had a target region without it");
    if (isa<BlockInvocationContext>(CallerLCtx)) {
      // Unwrap block invocation contexts. They're mostly part of
      // the current stack frame.
      CallerLCtx = CallerLCtx->getParent();
      assert(!isa<BlockInvocationContext>(CallerLCtx));
    }

    return updateObjectsUnderConstruction(V,
        cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
        RTC->getConstructionContext(), CallOpts);
  }
  case ConstructionContext::ElidedTemporaryObjectKind: {
    assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
    if (!CallOpts.IsElidableCtorThatHasNotBeenElided) {
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
      State = updateObjectsUnderConstruction(
          V, TCC->getConstructorAfterElision(), State, LCtx,
          TCC->getConstructionContextAfterElision(), CallOpts);

      // Remember that we've elided the constructor.
      State = addObjectUnderConstruction(
          State, TCC->getConstructorAfterElision(), LCtx, V);

      // Remember that we've elided the destructor.
      if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
        State = elideDestructor(State, BTE, LCtx);

      // Instead of materialization, shamelessly return
      // the final object destination.
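      // That is, bind the MaterializeTemporaryExpr directly to the final
      // destination instead of to a fresh temporary region.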
      if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
        State = addObjectUnderConstruction(State, MTE, LCtx, V);

      return State;
    }
    // If we decided not to elide the constructor, proceed as if
    // it's a simple temporary.
    [[fallthrough]];
  }
  case ConstructionContext::SimpleTemporaryObjectKind: {
    const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
    if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
      State = addObjectUnderConstruction(State, MTE, LCtx, V);

    return State;
  }
  case ConstructionContext::LambdaCaptureKind: {
    const auto *LCC = cast<LambdaCaptureConstructionContext>(CC);

    // If we capture an array, we want to store the super region, not a
    // sub-region.
    if (const auto *EL = dyn_cast_or_null<ElementRegion>(V.getAsRegion()))
      V = loc::MemRegionVal(EL->getSuperRegion());

    return addObjectUnderConstruction(
        State, {LCC->getLambdaExpr(), LCC->getIndex()}, LCtx, V);
  }
  case ConstructionContext::ArgumentKind: {
    const auto *ACC = cast<ArgumentConstructionContext>(CC);
    if (const auto *BTE = ACC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    return addObjectUnderConstruction(
        State, {ACC->getCallLikeExpr(), ACC->getIndex()}, LCtx, V);
  }
  }
  llvm_unreachable("Unhandled construction context!");
}

static ProgramStateRef
bindRequiredArrayElementToEnvironment(ProgramStateRef State,
                                      const ArrayInitLoopExpr *AILE,
                                      const LocationContext *LCtx, SVal Idx) {
  // The ctor in this case is guaranteed to be a copy ctor, otherwise we hit a
  // compile-time error.
  //
  //  -ArrayInitLoopExpr                    <-- we're here
  //  |-OpaqueValueExpr
  //  | `-DeclRefExpr                       <-- match this
  //  `-CXXConstructExpr
  //    `-ImplicitCastExpr
  //      `-ArraySubscriptExpr
  //        |-ImplicitCastExpr
  //        | `-OpaqueValueExpr
  //        |   `-DeclRefExpr
  //        `-ArrayInitIndexExpr
  //
  // The resulting expression might look like the one below in an implicit
  // copy/move ctor.
  //
  //   ArrayInitLoopExpr                    <-- we're here
  //   |-OpaqueValueExpr
  //   | `-MemberExpr                       <-- match this
  //   |  (`-CXXStaticCastExpr)             <-- move ctor only
  //   |   `-DeclRefExpr
  //   `-CXXConstructExpr
  //     `-ArraySubscriptExpr
  //       |-ImplicitCastExpr
  //       | `-OpaqueValueExpr
  //       |   `-MemberExpr
  //       |     `-DeclRefExpr
  //       `-ArrayInitIndexExpr
  //
  // The resulting expression for a multidimensional array:
  //   ArrayInitLoopExpr                    <-- we're here
  //   |-OpaqueValueExpr
  //   | `-DeclRefExpr                      <-- match this
  //   `-ArrayInitLoopExpr
  //     |-OpaqueValueExpr
  //     | `-ArraySubscriptExpr
  //     |   |-ImplicitCastExpr
  //     |   | `-OpaqueValueExpr
  //     |   |   `-DeclRefExpr
  //     |   `-ArrayInitIndexExpr
  //     `-CXXConstructExpr                 <-- extract this
  //       ` ...

  const auto *OVESrc = AILE->getCommonExpr()->getSourceExpr();

  // HACK: There is no way we can put the index of the array element into the
  // CFG unless we unroll the loop, so we manually select and bind the required
  // parameter to the environment.
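  // Concretely, we pre-bind the lvalue of the Idx-th source element to the
  // argument of the per-element copy/move constructor, as if the loop had
  // been unrolled up to this iteration.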
  const auto *CE =
      cast<CXXConstructExpr>(extractElementInitializerFromNestedAILE(AILE));

  SVal Base = UnknownVal();
  if (const auto *ME = dyn_cast<MemberExpr>(OVESrc))
    Base = State->getSVal(ME, LCtx);
  else if (const auto *DRE = dyn_cast<DeclRefExpr>(OVESrc))
    Base = State->getLValue(cast<VarDecl>(DRE->getDecl()), LCtx);
  else
    llvm_unreachable("ArrayInitLoopExpr contains unexpected source expression");

  SVal NthElem = State->getLValue(CE->getType(), Idx, Base);

  return State->BindExpr(CE->getArg(0), LCtx, NthElem);
}

void ExprEngine::handleConstructor(const Expr *E,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &destNodes) {
  const auto *CE = dyn_cast<CXXConstructExpr>(E);
  const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(E);
  assert(CE || CIE);

  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  SVal Target = UnknownVal();

  if (CE) {
    if (Optional<SVal> ElidedTarget =
            getObjectUnderConstruction(State, CE, LCtx)) {
      // We've previously modeled an elidable constructor by pretending that it
      // in fact constructs into the correct target. This constructor can
      // therefore be skipped.
      Target = *ElidedTarget;
      StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
      State = finishObjectConstruction(State, CE, LCtx);
      if (auto L = Target.getAs<Loc>())
        State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType()));
      Bldr.generateNode(CE, Pred, State);
      return;
    }
  }

  EvalCallOptions CallOpts;
  auto C = getCurrentCFGElement().getAs<CFGConstructor>();
  assert(C || getCurrentCFGElement().getAs<CFGStmt>());
  const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;

  const CXXConstructExpr::ConstructionKind CK =
      CE ? CE->getConstructionKind() : CIE->getConstructionKind();
  switch (CK) {
  case CXXConstructExpr::CK_Complete: {
    // Inherited constructors are always base class constructors.
    assert(CE && !CIE && "A complete constructor is inherited?!");

    // If the ctor is part of an ArrayInitLoopExpr, we want to handle it
    // differently.
    auto *AILE = CC ? CC->getArrayInitLoop() : nullptr;

    unsigned Idx = 0;
    if (CE->getType()->isArrayType() || AILE) {

      auto isZeroSizeArray = [&] {
        uint64_t Size = 1;

        if (const auto *CAT = dyn_cast<ConstantArrayType>(CE->getType()))
          Size = getContext().getConstantArrayElementCount(CAT);
        else if (AILE)
          Size = getContext().getArrayInitLoopExprElementCount(AILE);

        return Size == 0;
      };

      // No element construction will happen in a zero-size array.
      if (isZeroSizeArray()) {
        StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
        static SimpleProgramPointTag T{"ExprEngine",
                                       "Skipping 0 size array construction"};
        Bldr.generateNode(CE, Pred, State, &T);
        return;
      }

      Idx = getIndexOfElementToConstruct(State, CE, LCtx).value_or(0u);
      State = setIndexOfElementToConstruct(State, CE, LCtx, Idx + 1);
    }

    if (AILE) {
      // Only set this once even though we loop through it multiple times.
      if (!getPendingInitLoop(State, CE, LCtx))
        State = setPendingInitLoop(
            State, CE, LCtx,
            getContext().getArrayInitLoopExprElementCount(AILE));

      State = bindRequiredArrayElementToEnvironment(
          State, AILE, LCtx, svalBuilder.makeArrayIndex(Idx));
    }

    // The target region is found from the construction context.
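    // handleConstructionContext() computes the target via
    // computeObjectUnderConstruction() and records the object as being under
    // construction via updateObjectsUnderConstruction() (both defined above).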
    std::tie(State, Target) = handleConstructionContext(
        CE, State, currBldrCtx, LCtx, CC, CallOpts, Idx);
    break;
  }
  case CXXConstructExpr::CK_VirtualBase: {
    // Make sure we are not calling virtual base class initializers twice.
    // Only the most-derived object should initialize virtual base classes.
    const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>(
        LCtx->getStackFrame()->getCallSite());
    assert(
        (!OuterCtor ||
         OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Complete ||
         OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Delegating) &&
        ("This virtual base should have already been initialized by "
         "the most derived class!"));
    (void)OuterCtor;
    [[fallthrough]];
  }
  case CXXConstructExpr::CK_NonVirtualBase:
    // In C++17, classes with non-virtual bases may be aggregates, so they are
    // initialized as aggregates without a constructor call; therefore we may
    // have a base class constructed directly into an initializer list without
    // the derived-class constructor call on the previous stack frame.
    // Initializer lists may be nested into more initializer lists that
    // correspond to surrounding aggregate initializations.
    // FIXME: For now this code essentially bails out. We need to find the
    // correct target region and set it.
    // FIXME: Instead of relying on the ParentMap, we should have the
    // trigger-statement (InitListExpr in this case) passed down from the CFG
    // or otherwise always available during construction.
    if (isa_and_nonnull<InitListExpr>(LCtx->getParentMap().getParent(E))) {
      MemRegionManager &MRMgr = getSValBuilder().getRegionManager();
      Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
      CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
      break;
    }
    [[fallthrough]];
  case CXXConstructExpr::CK_Delegating: {
    const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
    Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor,
                                              LCtx->getStackFrame());
    SVal ThisVal = State->getSVal(ThisPtr);

    if (CK == CXXConstructExpr::CK_Delegating) {
      Target = ThisVal;
    } else {
      // Cast to the base type.
      bool IsVirtual = (CK == CXXConstructExpr::CK_VirtualBase);
      SVal BaseVal =
          getStoreManager().evalDerivedToBase(ThisVal, E->getType(), IsVirtual);
      Target = BaseVal;
    }
    break;
  }
  }

  if (State != Pred->getState()) {
    static SimpleProgramPointTag T("ExprEngine",
                                   "Prepare for object construction");
    ExplodedNodeSet DstPrepare;
    StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx);
    BldrPrepare.generateNode(E, Pred, State, &T, ProgramPoint::PreStmtKind);
    assert(DstPrepare.size() <= 1);
    if (DstPrepare.size() == 0)
      return;
    Pred = *BldrPrepare.begin();
  }

  const MemRegion *TargetRegion = Target.getAsRegion();
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call =
      CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall(
                CIE, TargetRegion, State, LCtx)
          : (CallEventRef<>)CEMgr.getCXXConstructorCall(
                CE, TargetRegion, State, LCtx);

  ExplodedNodeSet DstPreVisit;
  getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, E, *this);

  ExplodedNodeSet PreInitialized;
  if (CE) {
    // FIXME: Is it possible and/or useful to do this before PreStmt?
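    // If the constructed object requires zero-initialization (e.g. 'S()' for
    // a class with a trivial default constructor), bind a zeroed default
    // value to the target region before evaluating the constructor call.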
    StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
    for (ExplodedNodeSet::iterator I = DstPreVisit.begin(),
                                   E = DstPreVisit.end();
         I != E; ++I) {
      ProgramStateRef State = (*I)->getState();
      if (CE->requiresZeroInitialization()) {
        // FIXME: Once we properly handle constructors in new-expressions, we'll
        // need to invalidate the region before setting a default value, to make
        // sure there aren't any lingering bindings around. This probably needs
        // to happen regardless of whether or not the object is zero-initialized
        // to handle random fields of a placement-initialized object picking up
        // old bindings. We might only want to do it when we need to, though.
        // FIXME: This isn't actually correct for arrays -- we need to zero-
        // initialize the entire array, not just the first element -- but our
        // handling of arrays everywhere else is weak as well, so this shouldn't
        // actually make things worse. Placement new makes this tricky as well,
        // since it's then possible to be initializing one part of a multi-
        // dimensional array.
        State = State->bindDefaultZero(Target, LCtx);
      }

      Bldr.generateNode(CE, *I, State, /*tag=*/nullptr,
                        ProgramPoint::PreStmtKind);
    }
  } else {
    PreInitialized = DstPreVisit;
  }

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
                                            *Call, *this);

  ExplodedNodeSet DstEvaluated;

  if (CE && CE->getConstructor()->isTrivial() &&
      CE->getConstructor()->isCopyOrMoveConstructor() &&
      !CallOpts.IsArrayCtorOrDtor) {
    StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);
    // FIXME: Handle other kinds of trivial constructors as well.
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      performTrivialCopy(Bldr, *I, *Call);

  } else {
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      getCheckerManager().runCheckersForEvalCall(DstEvaluated, *I, *Call, *this,
                                                 CallOpts);
  }

  // If the CFG was constructed without elements for temporary destructors
  // and the just-called constructor created a temporary object, then
  // stop exploration if the temporary object has a noreturn destructor.
  // This can lose coverage because the destructor, if it were present
  // in the CFG, would be called at the end of the full expression or
  // later (for lifetime-extended temporaries) -- but it avoids infeasible
  // paths when no-return temporary destructors are used for assertions.
  ExplodedNodeSet DstEvaluatedPostProcessed;
  StmtNodeBuilder Bldr(DstEvaluated, DstEvaluatedPostProcessed, *currBldrCtx);
  const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
  if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
    if (llvm::isa_and_nonnull<CXXTempObjectRegion>(TargetRegion) &&
        cast<CXXConstructorDecl>(Call->getDecl())
            ->getParent()
            ->isAnyDestructorNoReturn()) {

      // If we've inlined the constructor, then DstEvaluated would be empty.
      // In this case we still want a sink, which could be implemented
      // in processCallExit. But we don't have that implemented at the moment,
      // so if you hit this assertion, see if you can avoid inlining
      // the respective constructor when analyzer-config cfg-temporary-dtors
      // is set to false.
      // Otherwise there's nothing wrong with inlining such a constructor.
      assert(!DstEvaluated.empty() &&
             "We should not have inlined this constructor!");

      for (ExplodedNode *N : DstEvaluated) {
        Bldr.generateSink(E, N, N->getState());
      }

      // There is no need to run the PostCall and PostStmt checker
      // callbacks because we just generated sinks on all nodes in the
      // frontier.
      return;
    }
  }

  ExplodedNodeSet DstPostArgumentCleanup;
  for (ExplodedNode *I : DstEvaluatedPostProcessed)
    finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);

  // If there were other constructors called for object-type arguments
  // of this constructor, clean them up.
  ExplodedNodeSet DstPostCall;
  getCheckerManager().runCheckersForPostCall(DstPostCall,
                                             DstPostArgumentCleanup,
                                             *Call, *this);
  getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, E, *this);
}

void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
                                       ExplodedNode *Pred,
                                       ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}

void ExprEngine::VisitCXXInheritedCtorInitExpr(
    const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred,
    ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}

void ExprEngine::VisitCXXDestructor(QualType ObjectType,
                                    const MemRegion *Dest,
                                    const Stmt *S,
                                    bool IsBaseDtor,
                                    ExplodedNode *Pred,
                                    ExplodedNodeSet &Dst,
                                    EvalCallOptions &CallOpts) {
  assert(S && "A destructor without a trigger!");
  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
  assert(RecordDecl && "Only CXXRecordDecls should have destructors");
  const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();
  // FIXME: There should always be a Decl, otherwise the destructor call
  // shouldn't have been added to the CFG in the first place.
  if (!DtorDecl) {
    // Skip the invalid destructor. We cannot simply return, because that
    // would interrupt the analysis.
    static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
    // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway.
    PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx, &T);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  if (!Dest) {
    // We're trying to destroy something that is not a region. This may happen
    // for a variety of reasons (unknown target region, concrete integer
    // instead of a target region, etc.). The current code makes an attempt to
    // recover.
    // FIXME: We probably don't really need to recover when we're dealing
    // with concrete integers specifically.
    CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
    if (const Expr *E = dyn_cast_or_null<Expr>(S)) {
      Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext());
    } else {
      static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
      NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
      Bldr.generateSink(Pred->getLocation().withTag(&T),
                        Pred->getState(), Pred);
      return;
    }
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXDestructorCall> Call =
      CEMgr.getCXXDestructorCall(DtorDecl, S, Dest, IsBaseDtor, State, LCtx);

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Call->getSourceRange().getBegin(),
                                "Error evaluating destructor");

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstInvalidated;
  StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
       I != E; ++I)
    defaultEvalCall(Bldr, *I, *Call, CallOpts);

  getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated,
                                             *Call, *this);
}

void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                CNE->getBeginLoc(),
                                "Error evaluating New Allocator Call");
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx);

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred,
                                            *Call, *this);

  ExplodedNodeSet DstPostCall;
  StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx);
  for (ExplodedNode *I : DstPreCall) {
    // FIXME: Provide evalCall for checkers?
    defaultEvalCall(CallBldr, I, *Call);
  }
  // If the call is inlined, DstPostCall will be empty and we bail out now.

  // Store the return value of operator new() for future use, until the actual
  // CXXNewExpr gets processed.
  ExplodedNodeSet DstPostValue;
  StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx);
  for (ExplodedNode *I : DstPostCall) {
    // FIXME: Because CNE serves as the "call site" for the allocator (due to
    // lack of a better expression in the AST), the conjured return value symbol
    // is going to be of the same type (C++ object pointer type). Technically
    // this is not correct because operator new's prototype always says that
    // it returns a 'void *'. So we should change the type of the symbol,
    // and then evaluate the cast over the symbolic pointer from 'void *' to
    // the object pointer type. But without changing the symbol's type,
    // evaluating the no-op symbolic cast over it breaks too much, so we
    // skip it for now.
    ProgramStateRef State = I->getState();
    SVal RetVal = State->getSVal(CNE, LCtx);
    // [basic.stc.dynamic.allocation] (on the return value of an allocation
    // function):
    // "The order, contiguity, and initial value of storage allocated by
    // successive calls to an allocation function are unspecified."
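    // Accordingly, treat the freshly allocated storage as initially undefined
    // so that reads of it before initialization can be diagnosed.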
    State = State->bindDefaultInitial(RetVal, UndefinedVal{}, LCtx);

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the
    // case where new can return NULL. If we end up supporting that option, we
    // can consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const FunctionDecl *FD = CNE->getOperatorNew()) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true);
    }

    ValueBldr.generateNode(
        CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal));
  }

  ExplodedNodeSet DstPostPostCallCallback;
  getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                             DstPostValue, *Call, *this);
  for (ExplodedNode *I : DstPostPostCallCallback) {
    getCheckerManager().runCheckersForNewAllocator(*Call, Dst, I, *this);
  }
}

void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  // FIXME: Much of this should eventually migrate to CXXAllocatorCall.
  // Also, we need to decide how allocators actually work -- they're not
  // really part of the CXXNewExpr because they happen BEFORE the
  // CXXConstructExpr subexpression. See PR12014 for some discussion.

  unsigned blockCount = currBldrCtx->blockCount();
  const LocationContext *LCtx = Pred->getLocationContext();
  SVal symVal = UnknownVal();
  FunctionDecl *FD = CNE->getOperatorNew();

  bool IsStandardGlobalOpNewFunction =
      FD->isReplaceableGlobalAllocationFunction();

  ProgramStateRef State = Pred->getState();

  // Retrieve the stored operator new() return value.
  if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    symVal = *getObjectUnderConstruction(State, CNE, LCtx);
    State = finishObjectConstruction(State, CNE, LCtx);
  }

  // We assume all standard global 'operator new' functions allocate memory in
  // the heap. We realize this is an approximation that might not correctly
  // model a custom global allocator.
  if (symVal.isUnknown()) {
    if (IsStandardGlobalOpNewFunction)
      symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount);
    else
      symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(),
                                            blockCount);
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx);

  if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    // Invalidate placement args.
    // FIXME: Once we figure out how we want allocators to work,
    // we should be using the usual pre-/(default-)eval-/post-call checkers
    // here.
    State = Call->invalidateRegions(blockCount);
    if (!State)
      return;

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the
    // case where new can return NULL.
    // If we end up supporting that option, we can consider adding a check
    // for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (const auto *ProtoType = FD->getType()->getAs<FunctionProtoType>())
      if (!ProtoType->isNothrow())
        if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>())
          State = State->assume(*dSymVal, true);
  }

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  SVal Result = symVal;

  if (CNE->isArray()) {

    if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) {
      // If each element is initialized by its default constructor, the field
      // values are properly placed inside the required region; however, if an
      // initializer list is used, this doesn't happen automatically.
      auto *Init = CNE->getInitializer();
      bool isInitList = isa_and_nonnull<InitListExpr>(Init);

      QualType ObjTy =
          isInitList ? Init->getType() : CNE->getType()->getPointeeType();
      const ElementRegion *EleReg =
          MRMgr.getElementRegion(ObjTy, svalBuilder.makeArrayIndex(0), NewReg,
                                 svalBuilder.getContext());
      Result = loc::MemRegionVal(EleReg);

      // If the array is list-initialized, we bind the initializer list to the
      // memory region here, otherwise we would lose it.
      if (isInitList) {
        Bldr.takeNodes(Pred);
        Pred = Bldr.generateNode(CNE, Pred, State);

        SVal V = State->getSVal(Init, LCtx);
        ExplodedNodeSet evaluated;
        evalBind(evaluated, CNE, Pred, Result, V, true);

        Bldr.takeNodes(Pred);
        Bldr.addNodes(evaluated);

        Pred = *evaluated.begin();
        State = Pred->getState();
      }
    }

    State = State->BindExpr(CNE, Pred->getLocationContext(), Result);
    Bldr.generateNode(CNE, Pred, State);
    return;
  }

  // FIXME: Once we have proper support for CXXConstructExprs inside
  // CXXNewExpr, we need to make sure that the constructed object is not
  // immediately invalidated here. (The placement call should happen before
  // the constructor call anyway.)
  if (FD->isReservedGlobalPlacementOperator()) {
    // Non-array placement new should always return the placement location.
    SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx);
    Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(),
                                  CNE->getPlacementArg(0)->getType());
  }

  // Bind the address of the object, then check to see if we cached out.
  State = State->BindExpr(CNE, LCtx, Result);
  ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State);
  if (!NewN)
    return;

  // If the type is not a record, we won't have a CXXConstructExpr as an
  // initializer. Copy the value over.
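  // For example, for 'new int(5)' the initializer is a plain integer literal
  // rather than a CXXConstructExpr, so the value 5 is bound directly at the
  // location returned by the allocator.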
  if (const Expr *Init = CNE->getInitializer()) {
    if (!isa<CXXConstructExpr>(Init)) {
      assert(Bldr.getResults().size() == 1);
      Bldr.takeNodes(NewN);
      evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx),
               /*FirstInit=*/IsStandardGlobalOpNewFunction);
    }
  }
}

void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE,
                                    ExplodedNode *Pred, ExplodedNodeSet &Dst) {

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall(
      CDE, Pred->getState(), Pred->getLocationContext());

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, *Call, *this);
  ExplodedNodeSet DstPostCall;

  if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    StmtNodeBuilder Bldr(DstPreCall, DstPostCall, *currBldrCtx);
    for (ExplodedNode *I : DstPreCall) {
      defaultEvalCall(Bldr, I, *Call);
    }
  } else {
    DstPostCall = DstPreCall;
  }
  getCheckerManager().runCheckersForPostCall(Dst, DstPostCall, *Call, *this);
}

void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  const VarDecl *VD = CS->getExceptionDecl();
  if (!VD) {
    Dst.Add(Pred);
    return;
  }

  const LocationContext *LCtx = Pred->getLocationContext();
  SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(),
                                        currBldrCtx->blockCount());
  ProgramStateRef state = Pred->getState();
  state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx);

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  Bldr.generateNode(CS, Pred, state);
}

void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred,
                                  ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  // Get the this object region from the StoreManager.
  const LocationContext *LCtx = Pred->getLocationContext();
  const MemRegion *R =
      svalBuilder.getRegionManager().getCXXThisRegion(
          getContext().getCanonicalType(TE->getType()),
          LCtx);

  ProgramStateRef state = Pred->getState();
  SVal V = state->getSVal(loc::MemRegionVal(R));
  Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V));
}

void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  const LocationContext *LocCtxt = Pred->getLocationContext();

  // Get the region of the lambda itself.
  const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion(
      LE, LocCtxt);
  SVal V = loc::MemRegionVal(R);

  ProgramStateRef State = Pred->getState();

  // If we created a new MemRegion for the lambda, we should explicitly bind
  // the captures.
  unsigned Idx = 0;
  CXXRecordDecl::field_iterator CurField = LE->getLambdaClass()->field_begin();
  for (LambdaExpr::const_capture_init_iterator i = LE->capture_init_begin(),
                                               e = LE->capture_init_end();
       i != e; ++i, ++CurField, ++Idx) {
    FieldDecl *FieldForCapture = *CurField;
    SVal FieldLoc = State->getLValue(FieldForCapture, V);

    SVal InitVal;
    if (!FieldForCapture->hasCapturedVLAType()) {
      const Expr *InitExpr = *i;

      assert(InitExpr && "Capture missing initialization expression");

      // Capturing a zero-length array is a no-op, so we ignore it to get a
      // more accurate analysis.
      // If it were not ignored, it would set the default binding of the lambda
      // to 'Unknown', which can lead to genuinely 'Uninitialized' values being
      // treated as 'Unknown' and the corresponding warning not being reported.
      const auto FTy = FieldForCapture->getType();
      if (FTy->isConstantArrayType() &&
          getContext().getConstantArrayElementCount(
              getContext().getAsConstantArrayType(FTy)) == 0)
        continue;

      // With C++17 copy elision the InitExpr can be anything, so instead of
      // pattern matching all cases, we simply check whether the current field
      // is under construction or not, regardless of what its InitExpr is.
      if (const auto OUC =
              getObjectUnderConstruction(State, {LE, Idx}, LocCtxt)) {
        InitVal = State->getSVal(OUC->getAsRegion());

        State = finishObjectConstruction(State, {LE, Idx}, LocCtxt);
      } else
        InitVal = State->getSVal(InitExpr, LocCtxt);

    } else {

      assert(!getObjectUnderConstruction(State, {LE, Idx}, LocCtxt) &&
             "VLA capture by value is a compile time error!");

      // The field stores the length of a captured variable-length array.
      // These captures don't have initialization expressions; instead we
      // get the length from the VLAType size expression.
      Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
      InitVal = State->getSVal(SizeExpr, LocCtxt);
    }

    State = State->bindLoc(FieldLoc, InitVal, LocCtxt);
  }

  // Decay the Loc into an RValue, because there might be a
  // MaterializeTemporaryExpr node above this one which expects the bound value
  // to be an RValue.
  SVal LambdaRVal = State->getSVal(R);

  ExplodedNodeSet Tmp;
  StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
  // FIXME: Is this the right program point kind?
  Bldr.generateNode(LE, Pred,
                    State->BindExpr(LE, LocCtxt, LambdaRVal),
                    nullptr, ProgramPoint::PostLValueKind);

  // FIXME: Move all post/pre visits to ::Visit().
  getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this);
}