//===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the C++ expression evaluation engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"

using namespace clang;
using namespace ento;

void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME,
                                          ExplodedNode *Pred,
                                          ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  const Expr *tempExpr = ME->getSubExpr()->IgnoreParens();
  ProgramStateRef state = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME);
  Bldr.generateNode(ME, Pred, state);
}

// FIXME: This is the sort of code that should eventually live in a Core
// checker rather than as a special case in ExprEngine.
void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
                                    const CallEvent &Call) {
  SVal ThisVal;
  bool AlwaysReturnsLValue;
  const CXXRecordDecl *ThisRD = nullptr;
  if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
    assert(Ctor->getDecl()->isTrivial());
    assert(Ctor->getDecl()->isCopyOrMoveConstructor());
    ThisVal = Ctor->getCXXThisVal();
    ThisRD = Ctor->getDecl()->getParent();
    AlwaysReturnsLValue = false;
  } else {
    assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
    assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
           OO_Equal);
    ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
    ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
    AlwaysReturnsLValue = true;
  }

  assert(ThisRD);
  if (ThisRD->isEmpty()) {
    // Do nothing for empty classes. Otherwise it'd retrieve an UnknownVal
    // and bind it and RegionStore would think that the actual value
    // in this region at this offset is unknown.
    return;
  }

  const LocationContext *LCtx = Pred->getLocationContext();

  ExplodedNodeSet Dst;
  Bldr.takeNodes(Pred);

  SVal V = Call.getArgSVal(0);

  // If the value being copied is not unknown, load from its location to get
  // an aggregate rvalue.
77 if (Optional<Loc> L = V.getAs<Loc>()) 78 V = Pred->getState()->getSVal(*L); 79 else 80 assert(V.isUnknownOrUndef()); 81 82 const Expr *CallExpr = Call.getOriginExpr(); 83 evalBind(Dst, CallExpr, Pred, ThisVal, V, true); 84 85 PostStmt PS(CallExpr, LCtx); 86 for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end(); 87 I != E; ++I) { 88 ProgramStateRef State = (*I)->getState(); 89 if (AlwaysReturnsLValue) 90 State = State->BindExpr(CallExpr, LCtx, ThisVal); 91 else 92 State = bindReturnValue(Call, LCtx, State); 93 Bldr.generateNode(PS, State, *I); 94 } 95 } 96 97 SVal ExprEngine::makeElementRegion(ProgramStateRef State, SVal LValue, 98 QualType &Ty, bool &IsArray, unsigned Idx) { 99 SValBuilder &SVB = State->getStateManager().getSValBuilder(); 100 ASTContext &Ctx = SVB.getContext(); 101 102 if (const ArrayType *AT = Ctx.getAsArrayType(Ty)) { 103 while (AT) { 104 Ty = AT->getElementType(); 105 AT = dyn_cast<ArrayType>(AT->getElementType()); 106 } 107 LValue = State->getLValue(Ty, SVB.makeArrayIndex(Idx), LValue); 108 IsArray = true; 109 } 110 111 return LValue; 112 } 113 114 SVal ExprEngine::computeObjectUnderConstruction( 115 const Expr *E, ProgramStateRef State, const LocationContext *LCtx, 116 const ConstructionContext *CC, EvalCallOptions &CallOpts, unsigned Idx) { 117 SValBuilder &SVB = getSValBuilder(); 118 MemRegionManager &MRMgr = SVB.getRegionManager(); 119 ASTContext &ACtx = SVB.getContext(); 120 121 // Compute the target region by exploring the construction context. 122 if (CC) { 123 switch (CC->getKind()) { 124 case ConstructionContext::CXX17ElidedCopyVariableKind: 125 case ConstructionContext::SimpleVariableKind: { 126 const auto *DSCC = cast<VariableConstructionContext>(CC); 127 const auto *DS = DSCC->getDeclStmt(); 128 const auto *Var = cast<VarDecl>(DS->getSingleDecl()); 129 QualType Ty = Var->getType(); 130 return makeElementRegion(State, State->getLValue(Var, LCtx), Ty, 131 CallOpts.IsArrayCtorOrDtor, Idx); 132 } 133 case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind: 134 case ConstructionContext::SimpleConstructorInitializerKind: { 135 const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC); 136 const auto *Init = ICC->getCXXCtorInitializer(); 137 const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl()); 138 Loc ThisPtr = SVB.getCXXThis(CurCtor, LCtx->getStackFrame()); 139 SVal ThisVal = State->getSVal(ThisPtr); 140 if (Init->isBaseInitializer()) { 141 const auto *ThisReg = cast<SubRegion>(ThisVal.getAsRegion()); 142 const CXXRecordDecl *BaseClass = 143 Init->getBaseClass()->getAsCXXRecordDecl(); 144 const auto *BaseReg = 145 MRMgr.getCXXBaseObjectRegion(BaseClass, ThisReg, 146 Init->isBaseVirtual()); 147 return SVB.makeLoc(BaseReg); 148 } 149 if (Init->isDelegatingInitializer()) 150 return ThisVal; 151 152 const ValueDecl *Field; 153 SVal FieldVal; 154 if (Init->isIndirectMemberInitializer()) { 155 Field = Init->getIndirectMember(); 156 FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal); 157 } else { 158 Field = Init->getMember(); 159 FieldVal = State->getLValue(Init->getMember(), ThisVal); 160 } 161 162 QualType Ty = Field->getType(); 163 return makeElementRegion(State, FieldVal, Ty, CallOpts.IsArrayCtorOrDtor, 164 Idx); 165 } 166 case ConstructionContext::NewAllocatedObjectKind: { 167 if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { 168 const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC); 169 const auto *NE = NECC->getCXXNewExpr(); 170 SVal V = *getObjectUnderConstruction(State, NE, 
                                             LCtx);
        if (const SubRegion *MR =
                dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
          if (NE->isArray()) {
            CallOpts.IsArrayCtorOrDtor = true;

            auto Ty = NE->getType()->getPointeeType();
            while (const auto *AT = getContext().getAsArrayType(Ty))
              Ty = AT->getElementType();

            auto R = MRMgr.getElementRegion(Ty, svalBuilder.makeArrayIndex(Idx),
                                            MR, SVB.getContext());

            return loc::MemRegionVal(R);
          }
          return V;
        }
        // TODO: Detect when the allocator returns a null pointer.
        // Constructor shall not be called in this case.
      }
      break;
    }
    case ConstructionContext::SimpleReturnedValueKind:
    case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
      // The temporary is to be managed by the parent stack frame.
      // So build it in the parent stack frame if we're not in the
      // top frame of the analysis.
      const StackFrameContext *SFC = LCtx->getStackFrame();
      if (const LocationContext *CallerLCtx = SFC->getParent()) {
        auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                       .getAs<CFGCXXRecordTypedCall>();
        if (!RTC) {
          // We were unable to find the correct construction context for the
          // call in the parent stack frame. This is equivalent to not being
          // able to find construction context at all.
          break;
        }
        if (isa<BlockInvocationContext>(CallerLCtx)) {
          // Unwrap block invocation contexts. They're mostly part of
          // the current stack frame.
          CallerLCtx = CallerLCtx->getParent();
          assert(!isa<BlockInvocationContext>(CallerLCtx));
        }
        return computeObjectUnderConstruction(
            cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
            RTC->getConstructionContext(), CallOpts);
      } else {
        // We are on the top frame of the analysis. We do not know where the
        // object is returned to. Conjure a symbolic region for the return
        // value.
        // TODO: We probably need a new MemRegion kind to represent the storage
        // of that SymbolicRegion, so that we could produce a fancy symbol
        // instead of an anonymous conjured symbol.
        // TODO: Do we need to track the region to avoid having it dead
        // too early? It does die too early, at least in C++17, but because
        // putting anything into a SymbolicRegion causes an immediate escape,
        // it doesn't cause any leak false positives.
        const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
        // Make sure that this doesn't coincide with any other symbol
        // conjured for the returned expression.
        static const int TopLevelSymRegionTag = 0;
        const Expr *RetE = RCC->getReturnStmt()->getRetValue();
        assert(RetE && "Void returns should not have a construction context");
        QualType ReturnTy = RetE->getType();
        QualType RegionTy = ACtx.getPointerType(ReturnTy);
        return SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC, RegionTy,
                                    currBldrCtx->blockCount());
      }
      llvm_unreachable("Unhandled return value construction context!");
    }
    case ConstructionContext::ElidedTemporaryObjectKind: {
      assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);

      // Support pre-C++17 copy elision. We'll have the elidable copy
      // constructor in the AST and in the CFG, but we'll skip it
      // and construct directly into the final object. This call
      // also sets the CallOpts flags for us.
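      // Illustrative note (not from the original source): a typical pre-C++17
      // case is
      //   Foo f = Foo(42);
      // where the AST contains an elidable copy/move constructor wrapping the
      // Foo(42) temporary; eliding it means constructing Foo(42) directly
      // into 'f'.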
247 // If the elided copy/move constructor is not supported, there's still 248 // benefit in trying to model the non-elided constructor. 249 // Stash our state before trying to elide, as it'll get overwritten. 250 ProgramStateRef PreElideState = State; 251 EvalCallOptions PreElideCallOpts = CallOpts; 252 253 SVal V = computeObjectUnderConstruction( 254 TCC->getConstructorAfterElision(), State, LCtx, 255 TCC->getConstructionContextAfterElision(), CallOpts); 256 257 // FIXME: This definition of "copy elision has not failed" is unreliable. 258 // It doesn't indicate that the constructor will actually be inlined 259 // later; this is still up to evalCall() to decide. 260 if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) 261 return V; 262 263 // Copy elision failed. Revert the changes and proceed as if we have 264 // a simple temporary. 265 CallOpts = PreElideCallOpts; 266 CallOpts.IsElidableCtorThatHasNotBeenElided = true; 267 [[fallthrough]]; 268 } 269 case ConstructionContext::SimpleTemporaryObjectKind: { 270 const auto *TCC = cast<TemporaryObjectConstructionContext>(CC); 271 const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr(); 272 273 CallOpts.IsTemporaryCtorOrDtor = true; 274 if (MTE) { 275 if (const ValueDecl *VD = MTE->getExtendingDecl()) { 276 assert(MTE->getStorageDuration() != SD_FullExpression); 277 if (!VD->getType()->isReferenceType()) { 278 // We're lifetime-extended by a surrounding aggregate. 279 // Automatic destructors aren't quite working in this case 280 // on the CFG side. We should warn the caller about that. 281 // FIXME: Is there a better way to retrieve this information from 282 // the MaterializeTemporaryExpr? 283 CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true; 284 } 285 } 286 287 if (MTE->getStorageDuration() == SD_Static || 288 MTE->getStorageDuration() == SD_Thread) 289 return loc::MemRegionVal(MRMgr.getCXXStaticTempObjectRegion(E)); 290 } 291 292 return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)); 293 } 294 case ConstructionContext::LambdaCaptureKind: { 295 CallOpts.IsTemporaryCtorOrDtor = true; 296 297 const auto *LCC = cast<LambdaCaptureConstructionContext>(CC); 298 299 SVal Base = loc::MemRegionVal( 300 MRMgr.getCXXTempObjectRegion(LCC->getInitializer(), LCtx)); 301 302 const auto *CE = dyn_cast_or_null<CXXConstructExpr>(E); 303 if (getIndexOfElementToConstruct(State, CE, LCtx)) { 304 CallOpts.IsArrayCtorOrDtor = true; 305 Base = State->getLValue(E->getType(), svalBuilder.makeArrayIndex(Idx), 306 Base); 307 } 308 309 return Base; 310 } 311 case ConstructionContext::ArgumentKind: { 312 // Arguments are technically temporaries. 313 CallOpts.IsTemporaryCtorOrDtor = true; 314 315 const auto *ACC = cast<ArgumentConstructionContext>(CC); 316 const Expr *E = ACC->getCallLikeExpr(); 317 unsigned Idx = ACC->getIndex(); 318 319 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 320 auto getArgLoc = [&](CallEventRef<> Caller) -> Optional<SVal> { 321 const LocationContext *FutureSFC = 322 Caller->getCalleeStackFrame(currBldrCtx->blockCount()); 323 // Return early if we are unable to reliably foresee 324 // the future stack frame. 325 if (!FutureSFC) 326 return None; 327 328 // This should be equivalent to Caller->getDecl() for now, but 329 // FutureSFC->getDecl() is likely to support better stuff (like 330 // virtual functions) earlier. 331 const Decl *CalleeD = FutureSFC->getDecl(); 332 333 // FIXME: Support for variadic arguments is not implemented here yet. 
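        // Illustrative note (assumption, not from the original source): for a
        // variadic callee such as 'int printf(const char *, ...)' there is no
        // declared parameter (and hence no parameter region) for the trailing
        // arguments, so we bail out below.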
334 if (CallEvent::isVariadic(CalleeD)) 335 return None; 336 337 // Operator arguments do not correspond to operator parameters 338 // because this-argument is implemented as a normal argument in 339 // operator call expressions but not in operator declarations. 340 const TypedValueRegion *TVR = Caller->getParameterLocation( 341 *Caller->getAdjustedParameterIndex(Idx), currBldrCtx->blockCount()); 342 if (!TVR) 343 return None; 344 345 return loc::MemRegionVal(TVR); 346 }; 347 348 if (const auto *CE = dyn_cast<CallExpr>(E)) { 349 CallEventRef<> Caller = CEMgr.getSimpleCall(CE, State, LCtx); 350 if (Optional<SVal> V = getArgLoc(Caller)) 351 return *V; 352 else 353 break; 354 } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) { 355 // Don't bother figuring out the target region for the future 356 // constructor because we won't need it. 357 CallEventRef<> Caller = 358 CEMgr.getCXXConstructorCall(CCE, /*Target=*/nullptr, State, LCtx); 359 if (Optional<SVal> V = getArgLoc(Caller)) 360 return *V; 361 else 362 break; 363 } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) { 364 CallEventRef<> Caller = CEMgr.getObjCMethodCall(ME, State, LCtx); 365 if (Optional<SVal> V = getArgLoc(Caller)) 366 return *V; 367 else 368 break; 369 } 370 } 371 } // switch (CC->getKind()) 372 } 373 374 // If we couldn't find an existing region to construct into, assume we're 375 // constructing a temporary. Notify the caller of our failure. 376 CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; 377 return loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)); 378 } 379 380 ProgramStateRef ExprEngine::updateObjectsUnderConstruction( 381 SVal V, const Expr *E, ProgramStateRef State, const LocationContext *LCtx, 382 const ConstructionContext *CC, const EvalCallOptions &CallOpts) { 383 if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) { 384 // Sounds like we failed to find the target region and therefore 385 // copy elision failed. There's nothing we can do about it here. 386 return State; 387 } 388 389 // See if we're constructing an existing region by looking at the 390 // current construction context. 391 assert(CC && "Computed target region without construction context?"); 392 switch (CC->getKind()) { 393 case ConstructionContext::CXX17ElidedCopyVariableKind: 394 case ConstructionContext::SimpleVariableKind: { 395 const auto *DSCC = cast<VariableConstructionContext>(CC); 396 return addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, V); 397 } 398 case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind: 399 case ConstructionContext::SimpleConstructorInitializerKind: { 400 const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC); 401 const auto *Init = ICC->getCXXCtorInitializer(); 402 // Base and delegating initializers handled above 403 assert(Init->isAnyMemberInitializer() && 404 "Base and delegating initializers should have been handled by" 405 "computeObjectUnderConstruction()"); 406 return addObjectUnderConstruction(State, Init, LCtx, V); 407 } 408 case ConstructionContext::NewAllocatedObjectKind: { 409 return State; 410 } 411 case ConstructionContext::SimpleReturnedValueKind: 412 case ConstructionContext::CXX17ElidedCopyReturnedValueKind: { 413 const StackFrameContext *SFC = LCtx->getStackFrame(); 414 const LocationContext *CallerLCtx = SFC->getParent(); 415 if (!CallerLCtx) { 416 // No extra work is necessary in top frame. 
      return State;
    }

    auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                   .getAs<CFGCXXRecordTypedCall>();
    assert(RTC && "Could not have had a target region without it");
    if (isa<BlockInvocationContext>(CallerLCtx)) {
      // Unwrap block invocation contexts. They're mostly part of
      // the current stack frame.
      CallerLCtx = CallerLCtx->getParent();
      assert(!isa<BlockInvocationContext>(CallerLCtx));
    }

    return updateObjectsUnderConstruction(V,
        cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
        RTC->getConstructionContext(), CallOpts);
  }
  case ConstructionContext::ElidedTemporaryObjectKind: {
    assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
    if (!CallOpts.IsElidableCtorThatHasNotBeenElided) {
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
      State = updateObjectsUnderConstruction(
          V, TCC->getConstructorAfterElision(), State, LCtx,
          TCC->getConstructionContextAfterElision(), CallOpts);

      // Remember that we've elided the constructor.
      State = addObjectUnderConstruction(
          State, TCC->getConstructorAfterElision(), LCtx, V);

      // Remember that we've elided the destructor.
      if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
        State = elideDestructor(State, BTE, LCtx);

      // Instead of materialization, shamelessly return
      // the final object destination.
      if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
        State = addObjectUnderConstruction(State, MTE, LCtx, V);

      return State;
    }
    // If we decided not to elide the constructor, proceed as if
    // it's a simple temporary.
    [[fallthrough]];
  }
  case ConstructionContext::SimpleTemporaryObjectKind: {
    const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
    if (const auto *BTE = TCC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    if (const auto *MTE = TCC->getMaterializedTemporaryExpr())
      State = addObjectUnderConstruction(State, MTE, LCtx, V);

    return State;
  }
  case ConstructionContext::LambdaCaptureKind: {
    const auto *LCC = cast<LambdaCaptureConstructionContext>(CC);

    // If we capture an array, we want to store the super region, not a
    // sub-region.
    if (const auto *EL = dyn_cast_or_null<ElementRegion>(V.getAsRegion()))
      V = loc::MemRegionVal(EL->getSuperRegion());

    return addObjectUnderConstruction(
        State, {LCC->getLambdaExpr(), LCC->getIndex()}, LCtx, V);
  }
  case ConstructionContext::ArgumentKind: {
    const auto *ACC = cast<ArgumentConstructionContext>(CC);
    if (const auto *BTE = ACC->getCXXBindTemporaryExpr())
      State = addObjectUnderConstruction(State, BTE, LCtx, V);

    return addObjectUnderConstruction(
        State, {ACC->getCallLikeExpr(), ACC->getIndex()}, LCtx, V);
  }
  }
  llvm_unreachable("Unhandled construction context!");
}

static ProgramStateRef
bindRequiredArrayElementToEnvironment(ProgramStateRef State,
                                      const ArrayInitLoopExpr *AILE,
                                      const LocationContext *LCtx, SVal Idx) {
  // The ctor in this case is guaranteed to be a copy ctor, otherwise we hit a
  // compile time error.
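  // Illustrative note (not from the original source): ArrayInitLoopExprs like
  // the ones matched below are produced, e.g., by the implicit copy
  // constructor of 'struct S { int a[4]; };', by a lambda capturing an array
  // by value, or by a structured binding that decomposes an array.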
500 // 501 // -ArrayInitLoopExpr <-- we're here 502 // |-OpaqueValueExpr 503 // | `-DeclRefExpr <-- match this 504 // `-CXXConstructExpr 505 // `-ImplicitCastExpr 506 // `-ArraySubscriptExpr 507 // |-ImplicitCastExpr 508 // | `-OpaqueValueExpr 509 // | `-DeclRefExpr 510 // `-ArrayInitIndexExpr 511 // 512 // The resulting expression might look like the one below in an implicit 513 // copy/move ctor. 514 // 515 // ArrayInitLoopExpr <-- we're here 516 // |-OpaqueValueExpr 517 // | `-MemberExpr <-- match this 518 // | (`-CXXStaticCastExpr) <-- move ctor only 519 // | `-DeclRefExpr 520 // `-CXXConstructExpr 521 // `-ArraySubscriptExpr 522 // |-ImplicitCastExpr 523 // | `-OpaqueValueExpr 524 // | `-MemberExpr 525 // | `-DeclRefExpr 526 // `-ArrayInitIndexExpr 527 // 528 // The resulting expression for a multidimensional array. 529 // ArrayInitLoopExpr <-- we're here 530 // |-OpaqueValueExpr 531 // | `-DeclRefExpr <-- match this 532 // `-ArrayInitLoopExpr 533 // |-OpaqueValueExpr 534 // | `-ArraySubscriptExpr 535 // | |-ImplicitCastExpr 536 // | | `-OpaqueValueExpr 537 // | | `-DeclRefExpr 538 // | `-ArrayInitIndexExpr 539 // `-CXXConstructExpr <-- extract this 540 // ` ... 541 542 const auto *OVESrc = AILE->getCommonExpr()->getSourceExpr(); 543 544 // HACK: There is no way we can put the index of the array element into the 545 // CFG unless we unroll the loop, so we manually select and bind the required 546 // parameter to the environment. 547 const auto *CE = 548 cast<CXXConstructExpr>(extractElementInitializerFromNestedAILE(AILE)); 549 550 SVal Base = UnknownVal(); 551 if (const auto *ME = dyn_cast<MemberExpr>(OVESrc)) 552 Base = State->getSVal(ME, LCtx); 553 else if (const auto *DRE = dyn_cast<DeclRefExpr>(OVESrc)) 554 Base = State->getLValue(cast<VarDecl>(DRE->getDecl()), LCtx); 555 else 556 llvm_unreachable("ArrayInitLoopExpr contains unexpected source expression"); 557 558 SVal NthElem = State->getLValue(CE->getType(), Idx, Base); 559 560 return State->BindExpr(CE->getArg(0), LCtx, NthElem); 561 } 562 563 void ExprEngine::handleConstructor(const Expr *E, 564 ExplodedNode *Pred, 565 ExplodedNodeSet &destNodes) { 566 const auto *CE = dyn_cast<CXXConstructExpr>(E); 567 const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(E); 568 assert(CE || CIE); 569 570 const LocationContext *LCtx = Pred->getLocationContext(); 571 ProgramStateRef State = Pred->getState(); 572 573 SVal Target = UnknownVal(); 574 575 if (CE) { 576 if (Optional<SVal> ElidedTarget = 577 getObjectUnderConstruction(State, CE, LCtx)) { 578 // We've previously modeled an elidable constructor by pretending that it 579 // in fact constructs into the correct target. This constructor can 580 // therefore be skipped. 581 Target = *ElidedTarget; 582 StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx); 583 State = finishObjectConstruction(State, CE, LCtx); 584 if (auto L = Target.getAs<Loc>()) 585 State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType())); 586 Bldr.generateNode(CE, Pred, State); 587 return; 588 } 589 } 590 591 EvalCallOptions CallOpts; 592 auto C = getCurrentCFGElement().getAs<CFGConstructor>(); 593 assert(C || getCurrentCFGElement().getAs<CFGStmt>()); 594 const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr; 595 596 const CXXConstructExpr::ConstructionKind CK = 597 CE ? CE->getConstructionKind() : CIE->getConstructionKind(); 598 switch (CK) { 599 case CXXConstructExpr::CK_Complete: { 600 // Inherited constructors are always base class constructors. 
601 assert(CE && !CIE && "A complete constructor is inherited?!"); 602 603 // If the ctor is part of an ArrayInitLoopExpr, we want to handle it 604 // differently. 605 auto *AILE = CC ? CC->getArrayInitLoop() : nullptr; 606 607 unsigned Idx = 0; 608 if (CE->getType()->isArrayType() || AILE) { 609 Idx = getIndexOfElementToConstruct(State, CE, LCtx).value_or(0u); 610 State = setIndexOfElementToConstruct(State, CE, LCtx, Idx + 1); 611 } 612 613 if (AILE) { 614 // Only set this once even though we loop through it multiple times. 615 if (!getPendingInitLoop(State, CE, LCtx)) 616 State = setPendingInitLoop( 617 State, CE, LCtx, 618 getContext().getArrayInitLoopExprElementCount(AILE)); 619 620 State = bindRequiredArrayElementToEnvironment( 621 State, AILE, LCtx, svalBuilder.makeArrayIndex(Idx)); 622 } 623 624 // The target region is found from construction context. 625 std::tie(State, Target) = 626 handleConstructionContext(CE, State, LCtx, CC, CallOpts, Idx); 627 break; 628 } 629 case CXXConstructExpr::CK_VirtualBase: { 630 // Make sure we are not calling virtual base class initializers twice. 631 // Only the most-derived object should initialize virtual base classes. 632 const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>( 633 LCtx->getStackFrame()->getCallSite()); 634 assert( 635 (!OuterCtor || 636 OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Complete || 637 OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Delegating) && 638 ("This virtual base should have already been initialized by " 639 "the most derived class!")); 640 (void)OuterCtor; 641 [[fallthrough]]; 642 } 643 case CXXConstructExpr::CK_NonVirtualBase: 644 // In C++17, classes with non-virtual bases may be aggregates, so they would 645 // be initialized as aggregates without a constructor call, so we may have 646 // a base class constructed directly into an initializer list without 647 // having the derived-class constructor call on the previous stack frame. 648 // Initializer lists may be nested into more initializer lists that 649 // correspond to surrounding aggregate initializations. 650 // FIXME: For now this code essentially bails out. We need to find the 651 // correct target region and set it. 652 // FIXME: Instead of relying on the ParentMap, we should have the 653 // trigger-statement (InitListExpr in this case) passed down from CFG or 654 // otherwise always available during construction. 655 if (isa_and_nonnull<InitListExpr>(LCtx->getParentMap().getParent(E))) { 656 MemRegionManager &MRMgr = getSValBuilder().getRegionManager(); 657 Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)); 658 CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; 659 break; 660 } 661 [[fallthrough]]; 662 case CXXConstructExpr::CK_Delegating: { 663 const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl()); 664 Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor, 665 LCtx->getStackFrame()); 666 SVal ThisVal = State->getSVal(ThisPtr); 667 668 if (CK == CXXConstructExpr::CK_Delegating) { 669 Target = ThisVal; 670 } else { 671 // Cast to the base type. 
672 bool IsVirtual = (CK == CXXConstructExpr::CK_VirtualBase); 673 SVal BaseVal = 674 getStoreManager().evalDerivedToBase(ThisVal, E->getType(), IsVirtual); 675 Target = BaseVal; 676 } 677 break; 678 } 679 } 680 681 if (State != Pred->getState()) { 682 static SimpleProgramPointTag T("ExprEngine", 683 "Prepare for object construction"); 684 ExplodedNodeSet DstPrepare; 685 StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx); 686 BldrPrepare.generateNode(E, Pred, State, &T, ProgramPoint::PreStmtKind); 687 assert(DstPrepare.size() <= 1); 688 if (DstPrepare.size() == 0) 689 return; 690 Pred = *BldrPrepare.begin(); 691 } 692 693 const MemRegion *TargetRegion = Target.getAsRegion(); 694 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 695 CallEventRef<> Call = 696 CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall( 697 CIE, TargetRegion, State, LCtx) 698 : (CallEventRef<>)CEMgr.getCXXConstructorCall( 699 CE, TargetRegion, State, LCtx); 700 701 ExplodedNodeSet DstPreVisit; 702 getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, E, *this); 703 704 ExplodedNodeSet PreInitialized; 705 if (CE) { 706 // FIXME: Is it possible and/or useful to do this before PreStmt? 707 StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx); 708 for (ExplodedNodeSet::iterator I = DstPreVisit.begin(), 709 E = DstPreVisit.end(); 710 I != E; ++I) { 711 ProgramStateRef State = (*I)->getState(); 712 if (CE->requiresZeroInitialization()) { 713 // FIXME: Once we properly handle constructors in new-expressions, we'll 714 // need to invalidate the region before setting a default value, to make 715 // sure there aren't any lingering bindings around. This probably needs 716 // to happen regardless of whether or not the object is zero-initialized 717 // to handle random fields of a placement-initialized object picking up 718 // old bindings. We might only want to do it when we need to, though. 719 // FIXME: This isn't actually correct for arrays -- we need to zero- 720 // initialize the entire array, not just the first element -- but our 721 // handling of arrays everywhere else is weak as well, so this shouldn't 722 // actually make things worse. Placement new makes this tricky as well, 723 // since it's then possible to be initializing one part of a multi- 724 // dimensional array. 725 State = State->bindDefaultZero(Target, LCtx); 726 } 727 728 Bldr.generateNode(CE, *I, State, /*tag=*/nullptr, 729 ProgramPoint::PreStmtKind); 730 } 731 } else { 732 PreInitialized = DstPreVisit; 733 } 734 735 ExplodedNodeSet DstPreCall; 736 getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized, 737 *Call, *this); 738 739 ExplodedNodeSet DstEvaluated; 740 741 if (CE && CE->getConstructor()->isTrivial() && 742 CE->getConstructor()->isCopyOrMoveConstructor() && 743 !CallOpts.IsArrayCtorOrDtor) { 744 StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx); 745 // FIXME: Handle other kinds of trivial constructors as well. 
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      performTrivialCopy(Bldr, *I, *Call);

  } else {
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      getCheckerManager().runCheckersForEvalCall(DstEvaluated, *I, *Call, *this,
                                                 CallOpts);
  }

  // If the CFG was constructed without elements for temporary destructors
  // and the just-called constructor created a temporary object then
  // stop exploration if the temporary object has a noreturn destructor.
  // This can lose coverage because the destructor, if it were present
  // in the CFG, would be called at the end of the full expression or
  // later (for life-time extended temporaries) -- but avoids infeasible
  // paths when no-return temporary destructors are used for assertions.
  ExplodedNodeSet DstEvaluatedPostProcessed;
  StmtNodeBuilder Bldr(DstEvaluated, DstEvaluatedPostProcessed, *currBldrCtx);
  const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
  if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
    if (llvm::isa_and_nonnull<CXXTempObjectRegion>(TargetRegion) &&
        cast<CXXConstructorDecl>(Call->getDecl())
            ->getParent()
            ->isAnyDestructorNoReturn()) {

      // If we've inlined the constructor, then DstEvaluated would be empty.
      // In this case we still want a sink, which could be implemented
      // in processCallExit. But we don't have that implemented at the moment,
      // so if you hit this assertion, see if you can avoid inlining
      // the respective constructor when analyzer-config cfg-temporary-dtors
      // is set to false.
      // Otherwise there's nothing wrong with inlining such constructor.
      assert(!DstEvaluated.empty() &&
             "We should not have inlined this constructor!");

      for (ExplodedNode *N : DstEvaluated) {
        Bldr.generateSink(E, N, N->getState());
      }

      // There is no need to run the PostCall and PostStmt checker
      // callbacks because we just generated sinks on all nodes in the
      // frontier.
      return;
    }
  }

  ExplodedNodeSet DstPostArgumentCleanup;
  for (ExplodedNode *I : DstEvaluatedPostProcessed)
    finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);

  // If there were other constructors called for object-type arguments
  // of this constructor, clean them up.
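  // For instance (illustrative, not from the original source), in 'C c{D{}};'
  // a temporary D is constructed for the constructor argument; its
  // pending-construction bookkeeping is finished by the loop above.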
800 ExplodedNodeSet DstPostCall; 801 getCheckerManager().runCheckersForPostCall(DstPostCall, 802 DstPostArgumentCleanup, 803 *Call, *this); 804 getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, E, *this); 805 } 806 807 void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE, 808 ExplodedNode *Pred, 809 ExplodedNodeSet &Dst) { 810 handleConstructor(CE, Pred, Dst); 811 } 812 813 void ExprEngine::VisitCXXInheritedCtorInitExpr( 814 const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred, 815 ExplodedNodeSet &Dst) { 816 handleConstructor(CE, Pred, Dst); 817 } 818 819 void ExprEngine::VisitCXXDestructor(QualType ObjectType, 820 const MemRegion *Dest, 821 const Stmt *S, 822 bool IsBaseDtor, 823 ExplodedNode *Pred, 824 ExplodedNodeSet &Dst, 825 EvalCallOptions &CallOpts) { 826 assert(S && "A destructor without a trigger!"); 827 const LocationContext *LCtx = Pred->getLocationContext(); 828 ProgramStateRef State = Pred->getState(); 829 830 const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl(); 831 assert(RecordDecl && "Only CXXRecordDecls should have destructors"); 832 const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor(); 833 // FIXME: There should always be a Decl, otherwise the destructor call 834 // shouldn't have been added to the CFG in the first place. 835 if (!DtorDecl) { 836 // Skip the invalid destructor. We cannot simply return because 837 // it would interrupt the analysis instead. 838 static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor"); 839 // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway. 840 PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx, &T); 841 NodeBuilder Bldr(Pred, Dst, *currBldrCtx); 842 Bldr.generateNode(PP, Pred->getState(), Pred); 843 return; 844 } 845 846 if (!Dest) { 847 // We're trying to destroy something that is not a region. This may happen 848 // for a variety of reasons (unknown target region, concrete integer instead 849 // of target region, etc.). The current code makes an attempt to recover. 850 // FIXME: We probably don't really need to recover when we're dealing 851 // with concrete integers specifically. 
852 CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; 853 if (const Expr *E = dyn_cast_or_null<Expr>(S)) { 854 Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext()); 855 } else { 856 static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor"); 857 NodeBuilder Bldr(Pred, Dst, *currBldrCtx); 858 Bldr.generateSink(Pred->getLocation().withTag(&T), 859 Pred->getState(), Pred); 860 return; 861 } 862 } 863 864 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 865 CallEventRef<CXXDestructorCall> Call = 866 CEMgr.getCXXDestructorCall(DtorDecl, S, Dest, IsBaseDtor, State, LCtx); 867 868 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), 869 Call->getSourceRange().getBegin(), 870 "Error evaluating destructor"); 871 872 ExplodedNodeSet DstPreCall; 873 getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, 874 *Call, *this); 875 876 ExplodedNodeSet DstInvalidated; 877 StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx); 878 for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end(); 879 I != E; ++I) 880 defaultEvalCall(Bldr, *I, *Call, CallOpts); 881 882 getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated, 883 *Call, *this); 884 } 885 886 void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE, 887 ExplodedNode *Pred, 888 ExplodedNodeSet &Dst) { 889 ProgramStateRef State = Pred->getState(); 890 const LocationContext *LCtx = Pred->getLocationContext(); 891 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), 892 CNE->getBeginLoc(), 893 "Error evaluating New Allocator Call"); 894 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 895 CallEventRef<CXXAllocatorCall> Call = 896 CEMgr.getCXXAllocatorCall(CNE, State, LCtx); 897 898 ExplodedNodeSet DstPreCall; 899 getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, 900 *Call, *this); 901 902 ExplodedNodeSet DstPostCall; 903 StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx); 904 for (ExplodedNode *I : DstPreCall) { 905 // FIXME: Provide evalCall for checkers? 906 defaultEvalCall(CallBldr, I, *Call); 907 } 908 // If the call is inlined, DstPostCall will be empty and we bail out now. 909 910 // Store return value of operator new() for future use, until the actual 911 // CXXNewExpr gets processed. 912 ExplodedNodeSet DstPostValue; 913 StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx); 914 for (ExplodedNode *I : DstPostCall) { 915 // FIXME: Because CNE serves as the "call site" for the allocator (due to 916 // lack of a better expression in the AST), the conjured return value symbol 917 // is going to be of the same type (C++ object pointer type). Technically 918 // this is not correct because the operator new's prototype always says that 919 // it returns a 'void *'. So we should change the type of the symbol, 920 // and then evaluate the cast over the symbolic pointer from 'void *' to 921 // the object pointer type. But without changing the symbol's type it 922 // is breaking too much to evaluate the no-op symbolic cast over it, so we 923 // skip it for now. 924 ProgramStateRef State = I->getState(); 925 SVal RetVal = State->getSVal(CNE, LCtx); 926 927 // If this allocation function is not declared as non-throwing, failures 928 // /must/ be signalled by exceptions, and thus the return value will never 929 // be NULL. -fno-exceptions does not influence this semantics. 930 // FIXME: GCC has a -fcheck-new option, which forces it to consider the case 931 // where new can return NULL. 
If we end up supporting that option, we can 932 // consider adding a check for it here. 933 // C++11 [basic.stc.dynamic.allocation]p3. 934 if (const FunctionDecl *FD = CNE->getOperatorNew()) { 935 QualType Ty = FD->getType(); 936 if (const auto *ProtoType = Ty->getAs<FunctionProtoType>()) 937 if (!ProtoType->isNothrow()) 938 State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true); 939 } 940 941 ValueBldr.generateNode( 942 CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal)); 943 } 944 945 ExplodedNodeSet DstPostPostCallCallback; 946 getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback, 947 DstPostValue, *Call, *this); 948 for (ExplodedNode *I : DstPostPostCallCallback) { 949 getCheckerManager().runCheckersForNewAllocator(*Call, Dst, I, *this); 950 } 951 } 952 953 void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred, 954 ExplodedNodeSet &Dst) { 955 // FIXME: Much of this should eventually migrate to CXXAllocatorCall. 956 // Also, we need to decide how allocators actually work -- they're not 957 // really part of the CXXNewExpr because they happen BEFORE the 958 // CXXConstructExpr subexpression. See PR12014 for some discussion. 959 960 unsigned blockCount = currBldrCtx->blockCount(); 961 const LocationContext *LCtx = Pred->getLocationContext(); 962 SVal symVal = UnknownVal(); 963 FunctionDecl *FD = CNE->getOperatorNew(); 964 965 bool IsStandardGlobalOpNewFunction = 966 FD->isReplaceableGlobalAllocationFunction(); 967 968 ProgramStateRef State = Pred->getState(); 969 970 // Retrieve the stored operator new() return value. 971 if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { 972 symVal = *getObjectUnderConstruction(State, CNE, LCtx); 973 State = finishObjectConstruction(State, CNE, LCtx); 974 } 975 976 // We assume all standard global 'operator new' functions allocate memory in 977 // heap. We realize this is an approximation that might not correctly model 978 // a custom global allocator. 979 if (symVal.isUnknown()) { 980 if (IsStandardGlobalOpNewFunction) 981 symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount); 982 else 983 symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(), 984 blockCount); 985 } 986 987 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 988 CallEventRef<CXXAllocatorCall> Call = 989 CEMgr.getCXXAllocatorCall(CNE, State, LCtx); 990 991 if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { 992 // Invalidate placement args. 993 // FIXME: Once we figure out how we want allocators to work, 994 // we should be using the usual pre-/(default-)eval-/post-call checkers 995 // here. 996 State = Call->invalidateRegions(blockCount); 997 if (!State) 998 return; 999 1000 // If this allocation function is not declared as non-throwing, failures 1001 // /must/ be signalled by exceptions, and thus the return value will never 1002 // be NULL. -fno-exceptions does not influence this semantics. 1003 // FIXME: GCC has a -fcheck-new option, which forces it to consider the case 1004 // where new can return NULL. If we end up supporting that option, we can 1005 // consider adding a check for it here. 1006 // C++11 [basic.stc.dynamic.allocation]p3. 
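    // Illustrative note (not from the original source): a plain throwing
    // '::operator new(std::size_t)' is assumed below to return a non-null
    // pointer, while a 'noexcept' overload (e.g. the nothrow_t form) may
    // legitimately return null, so no such assumption is made for it.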
    if (const auto *ProtoType = FD->getType()->getAs<FunctionProtoType>())
      if (!ProtoType->isNothrow())
        if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>())
          State = State->assume(*dSymVal, true);
  }

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  SVal Result = symVal;

  if (CNE->isArray()) {

    if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) {
      // If each element is initialized by its default constructor, the field
      // values are properly placed inside the required region; however, if an
      // initializer list is used, this doesn't happen automatically.
      auto *Init = CNE->getInitializer();
      bool isInitList = isa_and_nonnull<InitListExpr>(Init);

      QualType ObjTy =
          isInitList ? Init->getType() : CNE->getType()->getPointeeType();
      const ElementRegion *EleReg =
          MRMgr.getElementRegion(ObjTy, svalBuilder.makeArrayIndex(0), NewReg,
                                 svalBuilder.getContext());
      Result = loc::MemRegionVal(EleReg);

      // If the array is list initialized, we bind the initializer list to the
      // memory region here; otherwise we would lose it.
      if (isInitList) {
        Bldr.takeNodes(Pred);
        Pred = Bldr.generateNode(CNE, Pred, State);

        SVal V = State->getSVal(Init, LCtx);
        ExplodedNodeSet evaluated;
        evalBind(evaluated, CNE, Pred, Result, V, true);

        Bldr.takeNodes(Pred);
        Bldr.addNodes(evaluated);

        Pred = *evaluated.begin();
        State = Pred->getState();
      }
    }

    State = State->BindExpr(CNE, Pred->getLocationContext(), Result);
    Bldr.generateNode(CNE, Pred, State);
    return;
  }

  // FIXME: Once we have proper support for CXXConstructExprs inside
  // CXXNewExpr, we need to make sure that the constructed object is not
  // immediately invalidated here. (The placement call should happen before
  // the constructor call anyway.)
  if (FD->isReservedGlobalPlacementOperator()) {
    // Non-array placement new should always return the placement location.
    SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx);
    Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(),
                                  CNE->getPlacementArg(0)->getType());
  }

  // Bind the address of the object, then check to see if we cached out.
  State = State->BindExpr(CNE, LCtx, Result);
  ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State);
  if (!NewN)
    return;

  // If the type is not a record, we won't have a CXXConstructExpr as an
  // initializer. Copy the value over.
1075 if (const Expr *Init = CNE->getInitializer()) { 1076 if (!isa<CXXConstructExpr>(Init)) { 1077 assert(Bldr.getResults().size() == 1); 1078 Bldr.takeNodes(NewN); 1079 evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx), 1080 /*FirstInit=*/IsStandardGlobalOpNewFunction); 1081 } 1082 } 1083 } 1084 1085 void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE, 1086 ExplodedNode *Pred, ExplodedNodeSet &Dst) { 1087 1088 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 1089 CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall( 1090 CDE, Pred->getState(), Pred->getLocationContext()); 1091 1092 ExplodedNodeSet DstPreCall; 1093 getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, *Call, *this); 1094 ExplodedNodeSet DstPostCall; 1095 1096 if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) { 1097 StmtNodeBuilder Bldr(DstPreCall, DstPostCall, *currBldrCtx); 1098 for (ExplodedNode *I : DstPreCall) { 1099 defaultEvalCall(Bldr, I, *Call); 1100 } 1101 } else { 1102 DstPostCall = DstPreCall; 1103 } 1104 getCheckerManager().runCheckersForPostCall(Dst, DstPostCall, *Call, *this); 1105 } 1106 1107 void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred, 1108 ExplodedNodeSet &Dst) { 1109 const VarDecl *VD = CS->getExceptionDecl(); 1110 if (!VD) { 1111 Dst.Add(Pred); 1112 return; 1113 } 1114 1115 const LocationContext *LCtx = Pred->getLocationContext(); 1116 SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(), 1117 currBldrCtx->blockCount()); 1118 ProgramStateRef state = Pred->getState(); 1119 state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx); 1120 1121 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1122 Bldr.generateNode(CS, Pred, state); 1123 } 1124 1125 void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred, 1126 ExplodedNodeSet &Dst) { 1127 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1128 1129 // Get the this object region from StoreManager. 1130 const LocationContext *LCtx = Pred->getLocationContext(); 1131 const MemRegion *R = 1132 svalBuilder.getRegionManager().getCXXThisRegion( 1133 getContext().getCanonicalType(TE->getType()), 1134 LCtx); 1135 1136 ProgramStateRef state = Pred->getState(); 1137 SVal V = state->getSVal(loc::MemRegionVal(R)); 1138 Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V)); 1139 } 1140 1141 void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred, 1142 ExplodedNodeSet &Dst) { 1143 const LocationContext *LocCtxt = Pred->getLocationContext(); 1144 1145 // Get the region of the lambda itself. 1146 const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion( 1147 LE, LocCtxt); 1148 SVal V = loc::MemRegionVal(R); 1149 1150 ProgramStateRef State = Pred->getState(); 1151 1152 // If we created a new MemRegion for the lambda, we should explicitly bind 1153 // the captures. 
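  // Illustrative note (not from the original source): for a lambda such as
  // '[x, &y] { ... }', the loop below binds each capture's initial value to
  // the corresponding field of the implicit closure object.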
  unsigned Idx = 0;
  CXXRecordDecl::field_iterator CurField = LE->getLambdaClass()->field_begin();
  for (LambdaExpr::const_capture_init_iterator i = LE->capture_init_begin(),
                                               e = LE->capture_init_end();
       i != e; ++i, ++CurField, ++Idx) {
    FieldDecl *FieldForCapture = *CurField;
    SVal FieldLoc = State->getLValue(FieldForCapture, V);

    SVal InitVal;
    if (!FieldForCapture->hasCapturedVLAType()) {
      const Expr *InitExpr = *i;

      assert(InitExpr && "Capture missing initialization expression");

      // With C++17 copy elision the InitExpr can be anything, so instead of
      // pattern matching all cases, we simply check whether the current field
      // is under construction or not, regardless of what its InitExpr is.
      if (const auto OUC =
              getObjectUnderConstruction(State, {LE, Idx}, LocCtxt)) {
        InitVal = State->getSVal(OUC->getAsRegion());

        State = finishObjectConstruction(State, {LE, Idx}, LocCtxt);
      } else
        InitVal = State->getSVal(InitExpr, LocCtxt);

    } else {

      assert(!getObjectUnderConstruction(State, {LE, Idx}, LocCtxt) &&
             "VLA capture by value is a compile time error!");

      // The field stores the length of a captured variable-length array.
      // These captures don't have initialization expressions; instead we
      // get the length from the VLAType size expression.
      Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
      InitVal = State->getSVal(SizeExpr, LocCtxt);
    }

    State = State->bindLoc(FieldLoc, InitVal, LocCtxt);
  }

  // Decay the Loc into an RValue, because there might be a
  // MaterializeTemporaryExpr node above this one which expects the bound value
  // to be an RValue.
  SVal LambdaRVal = State->getSVal(R);

  ExplodedNodeSet Tmp;
  StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
  // FIXME: is this the right program point kind?
  Bldr.generateNode(LE, Pred,
                    State->BindExpr(LE, LocCtxt, LambdaRVal),
                    nullptr, ProgramPoint::PostLValueKind);

  // FIXME: Move all post/pre visits to ::Visit().
  getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this);
}