//===--- CGClass.cpp - Emit LLVM Code for C++ classes -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CGBlocks.h"
#include "CGCXXABI.h"
#include "CGDebugInfo.h"
#include "CGRecordLayout.h"
#include "CodeGenFunction.h"
#include "TargetInfo.h"
#include "clang/AST/Attr.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/Basic/TargetBuiltins.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
#include "llvm/Transforms/Utils/SanitizerStats.h"
#include <optional>

using namespace clang;
using namespace CodeGen;

/// Return the best known alignment for an unknown pointer to a
/// particular class.
CharUnits CodeGenModule::getClassPointerAlignment(const CXXRecordDecl *RD) {
  if (!RD->hasDefinition())
    return CharUnits::One(); // Hopefully won't be used anywhere.

  auto &layout = getContext().getASTRecordLayout(RD);

  // If the class is final, then we know that the pointer points to an
  // object of that type and can use the full alignment.
  if (RD->isEffectivelyFinal())
    return layout.getAlignment();

  // Otherwise, we have to assume it could be a subclass.
  return layout.getNonVirtualAlignment();
}

/// Return the smallest possible amount of storage that might be allocated
/// starting from the beginning of an object of a particular class.
///
/// This may be smaller than sizeof(RD) if RD has virtual base classes.
CharUnits CodeGenModule::getMinimumClassObjectSize(const CXXRecordDecl *RD) {
  if (!RD->hasDefinition())
    return CharUnits::One();

  auto &layout = getContext().getASTRecordLayout(RD);

  // If the class is final, then we know that the pointer points to an
  // object of that type and can use the full alignment.
  if (RD->isEffectivelyFinal())
    return layout.getSize();

  // Otherwise, we have to assume it could be a subclass.
  return std::max(layout.getNonVirtualSize(), CharUnits::One());
}

/// Return the best known alignment for a pointer to a virtual base,
/// given the alignment of a pointer to the derived class.
CharUnits CodeGenModule::getVBaseAlignment(CharUnits actualDerivedAlign,
                                           const CXXRecordDecl *derivedClass,
                                           const CXXRecordDecl *vbaseClass) {
  // The basic idea here is that an underaligned derived pointer might
  // indicate an underaligned base pointer.
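  // For example, given
  //   struct B { double d; };
  //   struct D : virtual B { int n; };
  // a D* that is less aligned than D normally requires may also point at
  // a less-aligned B subobject, so the result is capped accordingly.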

  assert(vbaseClass->isCompleteDefinition());
  auto &baseLayout = getContext().getASTRecordLayout(vbaseClass);
  CharUnits expectedVBaseAlign = baseLayout.getNonVirtualAlignment();

  return getDynamicOffsetAlignment(actualDerivedAlign, derivedClass,
                                   expectedVBaseAlign);
}

CharUnits
CodeGenModule::getDynamicOffsetAlignment(CharUnits actualBaseAlign,
                                         const CXXRecordDecl *baseDecl,
                                         CharUnits expectedTargetAlign) {
  // If the base is an incomplete type (which is, alas, possible with
  // member pointers), be pessimistic.
  if (!baseDecl->isCompleteDefinition())
    return std::min(actualBaseAlign, expectedTargetAlign);

  auto &baseLayout = getContext().getASTRecordLayout(baseDecl);
  CharUnits expectedBaseAlign = baseLayout.getNonVirtualAlignment();

  // If the class is properly aligned, assume the target offset is, too.
  //
  // This actually isn't necessarily the right thing to do --- if the
  // class is a complete object, but it's only properly aligned for a
  // base subobject, then the alignments of things relative to it are
  // probably off as well. (Note that this requires the alignment of
  // the target to be greater than the NV alignment of the derived
  // class.)
  //
  // However, our approach to this kind of under-alignment can only
  // ever be best effort; after all, we're never going to propagate
  // alignments through variables or parameters. Note, in particular,
  // that constructing a polymorphic type in an address that's less
  // than pointer-aligned will generally trap in the constructor,
  // unless we someday add some sort of attribute to change the
  // assumed alignment of 'this'. So our goal here is pretty much
  // just to allow the user to explicitly say that a pointer is
  // under-aligned and then safely access its fields and vtables.
  if (actualBaseAlign >= expectedBaseAlign) {
    return expectedTargetAlign;
  }

  // Otherwise, we might be offset by an arbitrary multiple of the
  // actual alignment. The correct adjustment is to take the min of
  // the two alignments.
  return std::min(actualBaseAlign, expectedTargetAlign);
}

Address CodeGenFunction::LoadCXXThisAddress() {
  assert(CurFuncDecl && "loading 'this' without a func declaration?");
  auto *MD = cast<CXXMethodDecl>(CurFuncDecl);

  // Lazily compute CXXThisAlignment.
  if (CXXThisAlignment.isZero()) {
    // Just use the best known alignment for the parent.
    // TODO: if we're currently emitting a complete-object ctor/dtor,
    // we can always use the complete-object alignment.
    CXXThisAlignment = CGM.getClassPointerAlignment(MD->getParent());
  }

  llvm::Type *Ty = ConvertType(MD->getThisObjectType());
  return Address(LoadCXXThis(), Ty, CXXThisAlignment, KnownNonNull);
}

/// Emit the address of a field using a member data pointer.
///
/// \param E Only used for emergency diagnostics
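///
/// For example, with 'int S::*mp' in the Itanium ABI, where a member data
/// pointer is simply a byte offset, 'obj.*mp' computes the field address
/// by adding that offset to the address of 'obj'.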
Address
CodeGenFunction::EmitCXXMemberDataPointerAddress(const Expr *E, Address base,
                                                 llvm::Value *memberPtr,
                                                 const MemberPointerType *memberPtrType,
                                                 LValueBaseInfo *BaseInfo,
                                                 TBAAAccessInfo *TBAAInfo) {
  // Ask the ABI to compute the actual address.
  llvm::Value *ptr =
      CGM.getCXXABI().EmitMemberDataPointerAddress(*this, E, base,
                                                   memberPtr, memberPtrType);

  QualType memberType = memberPtrType->getPointeeType();
  CharUnits memberAlign =
      CGM.getNaturalTypeAlignment(memberType, BaseInfo, TBAAInfo);
  memberAlign =
      CGM.getDynamicOffsetAlignment(base.getAlignment(),
                                    memberPtrType->getClass()->getAsCXXRecordDecl(),
                                    memberAlign);
  return Address(ptr, ConvertTypeForMem(memberPtrType->getPointeeType()),
                 memberAlign);
}

CharUnits CodeGenModule::computeNonVirtualBaseClassOffset(
    const CXXRecordDecl *DerivedClass, CastExpr::path_const_iterator Start,
    CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const ASTContext &Context = getContext();
  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const auto *BaseDecl =
        cast<CXXRecordDecl>(Base->getType()->castAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}

llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                            CastExpr::path_const_iterator PathBegin,
                                            CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
      computeNonVirtualBaseClassOffset(ClassDecl, PathBegin, PathEnd);
  if (Offset.isZero())
    return nullptr;

  llvm::Type *PtrDiffTy =
      Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
Address
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(Address This,
                                                       const CXXRecordDecl *Derived,
                                                       const CXXRecordDecl *Base,
                                                       bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This.getElementType() == ConvertType(Derived));

  // Compute the offset of the virtual base.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  Address V = This;
  if (!Offset.isZero()) {
    V = V.withElementType(Int8Ty);
    V = Builder.CreateConstInBoundsByteGEP(V, Offset);
  }
  return V.withElementType(ConvertType(Base));
}

static Address
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, Address addr,
                                CharUnits nonVirtualOffset,
                                llvm::Value *virtualOffset,
                                const CXXRecordDecl *derivedClass,
                                const CXXRecordDecl *nearestVBase) {
  // Assert that we have something to do.
  assert(!nonVirtualOffset.isZero() || virtualOffset != nullptr);

  // Compute the offset from the static and dynamic components.
  llvm::Value *baseOffset;
  if (!nonVirtualOffset.isZero()) {
    llvm::Type *OffsetType =
        (CGF.CGM.getTarget().getCXXABI().isItaniumFamily() &&
         CGF.CGM.getItaniumVTableContext().isRelativeLayout())
            ? CGF.Int32Ty
            : CGF.PtrDiffTy;
    baseOffset =
        llvm::ConstantInt::get(OffsetType, nonVirtualOffset.getQuantity());
    if (virtualOffset) {
      baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
    }
  } else {
    baseOffset = virtualOffset;
  }

  // Apply the base offset.
  llvm::Value *ptr = addr.getPointer();
  ptr = CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, ptr, baseOffset, "add.ptr");

  // If we have a virtual component, the alignment of the result will
  // be relative only to the known alignment of that vbase.
  CharUnits alignment;
  if (virtualOffset) {
    assert(nearestVBase && "virtual offset without vbase?");
    alignment = CGF.CGM.getVBaseAlignment(addr.getAlignment(),
                                          derivedClass, nearestVBase);
  } else {
    alignment = addr.getAlignment();
  }
  alignment = alignment.alignmentAtOffset(nonVirtualOffset);

  return Address(ptr, CGF.Int8Ty, alignment);
}

Address CodeGenFunction::GetAddressOfBaseClass(
    Address Value, const CXXRecordDecl *Derived,
    CastExpr::path_const_iterator PathBegin,
    CastExpr::path_const_iterator PathEnd, bool NullCheckValue,
    SourceLocation Loc) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = nullptr;

  // Sema has done some convenient canonicalization here: if the
  // access path involved any virtual steps, the conversion path will
  // *start* with a step down to the correct virtual base subobject,
  // and hence will not require any further steps.
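  // For example, given
  //   struct A {};
  //   struct B : virtual A {};
  //   struct D : B {};
  // converting D* to A* is represented as the single virtual step down
  // to A rather than as the chain D -> B -> A.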
  if ((*Start)->isVirtual()) {
    VBase = cast<CXXRecordDecl>(
        (*Start)->getType()->castAs<RecordType>()->getDecl());
    ++Start;
  }

  // Compute the static offset of the ultimate destination within its
  // allocating subobject (the virtual base, if there is one, or else
  // the "complete" object that we see).
  CharUnits NonVirtualOffset = CGM.computeNonVirtualBaseClassOffset(
      VBase ? VBase : Derived, Start, PathEnd);

  // If there's a virtual step, we can sometimes "devirtualize" it.
  // For now, that's limited to when the derived type is final.
  // TODO: "devirtualize" this for accesses to known-complete objects.
  if (VBase && Derived->hasAttr<FinalAttr>()) {
    const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
    CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
    NonVirtualOffset += vBaseOffset;
    VBase = nullptr; // we no longer have a virtual step
  }

  // Get the base pointer type.
  llvm::Type *BaseValueTy = ConvertType((PathEnd[-1])->getType());
  llvm::Type *PtrTy = llvm::PointerType::get(
      CGM.getLLVMContext(), Value.getType()->getPointerAddressSpace());

  QualType DerivedTy = getContext().getRecordType(Derived);
  CharUnits DerivedAlign = CGM.getClassPointerAlignment(Derived);

  // If the static offset is zero and we don't have a virtual step,
  // just do a bitcast; null checks are unnecessary.
  if (NonVirtualOffset.isZero() && !VBase) {
    if (sanitizePerformTypeCheck()) {
      SanitizerSet SkippedChecks;
      SkippedChecks.set(SanitizerKind::Null, !NullCheckValue);
      EmitTypeCheck(TCK_Upcast, Loc, Value.getPointer(),
                    DerivedTy, DerivedAlign, SkippedChecks);
    }
    return Value.withElementType(BaseValueTy);
  }

  llvm::BasicBlock *origBB = nullptr;
  llvm::BasicBlock *endBB = nullptr;

  // Skip over the offset (and the vtable load) if we're supposed to
  // null-check the pointer.
  if (NullCheckValue) {
    origBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
    endBB = createBasicBlock("cast.end");

    llvm::Value *isNull = Builder.CreateIsNull(Value.getPointer());
    Builder.CreateCondBr(isNull, endBB, notNullBB);
    EmitBlock(notNullBB);
  }

  if (sanitizePerformTypeCheck()) {
    SanitizerSet SkippedChecks;
    SkippedChecks.set(SanitizerKind::Null, true);
    EmitTypeCheck(VBase ? TCK_UpcastToVirtualBase : TCK_Upcast, Loc,
                  Value.getPointer(), DerivedTy, DerivedAlign, SkippedChecks);
  }

  // Compute the virtual offset.
  llvm::Value *VirtualOffset = nullptr;
  if (VBase) {
    VirtualOffset =
        CGM.getCXXABI().GetVirtualBaseClassOffset(*this, Value, Derived, VBase);
  }

  // Apply both offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
                                          VirtualOffset, Derived, VBase);

  // Cast to the destination type.
  Value = Value.withElementType(BaseValueTy);

  // Build a phi if we needed a null check.
  if (NullCheckValue) {
    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    Builder.CreateBr(endBB);
    EmitBlock(endBB);

    llvm::PHINode *PHI = Builder.CreatePHI(PtrTy, 2, "cast.result");
    PHI->addIncoming(Value.getPointer(), notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(PtrTy), origBB);
    Value = Value.withPointer(PHI, NotKnownNonNull);
  }

  return Value;
}

Address
CodeGenFunction::GetAddressOfDerivedClass(Address BaseAddr,
                                          const CXXRecordDecl *Derived,
                                          CastExpr::path_const_iterator PathBegin,
                                          CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
      getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  llvm::Type *DerivedValueTy = ConvertType(DerivedTy);

  llvm::Value *NonVirtualOffset =
      CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return BaseAddr.withElementType(DerivedValueTy);
  }

  llvm::BasicBlock *CastNull = nullptr;
  llvm::BasicBlock *CastNotNull = nullptr;
  llvm::BasicBlock *CastEnd = nullptr;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(BaseAddr.getPointer());
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
  llvm::Value *Value = BaseAddr.getPointer();
  Value = Builder.CreateInBoundsGEP(
      Int8Ty, Value, Builder.CreateNeg(NonVirtualOffset), "sub.ptr");

  // Produce a PHI if we had a null-check.
  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);
    Value = PHI;
  }

  return Address(Value, DerivedValueTy, CGM.getClassPointerAlignment(Derived));
}

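// In the Itanium ABI, base-variant constructors and destructors of classes
// with virtual bases take a VTT ("vtable table") parameter. For example,
// given
//   struct A { virtual ~A(); };
//   struct B : virtual A { B(); };
//   struct C : B { C(); };
// C's complete constructor passes B's base constructor a pointer to B's
// sub-VTT within C's VTT, which supplies the construction vtables to
// install while the object is only partially constructed.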
llvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD,
                                              bool ForVirtualBase,
                                              bool Delegating) {
  if (!CGM.getCXXABI().NeedsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return nullptr;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CurCodeDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  uint64_t SubVTTIndex;

  if (Delegating) {
    // If this is a delegating constructor call, just load the VTT.
    return LoadCXXVTT();
  } else if (RD == Base) {
    // If the record matches the base, this is the complete ctor/dtor
    // variant calling the base variant in a class with virtual bases.
    assert(!CGM.getCXXABI().NeedsVTTParameter(CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    llvm::Value *VTT = LoadCXXVTT();
    return Builder.CreateConstInBoundsGEP1_64(VoidPtrTy, VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    llvm::GlobalValue *VTT = CGM.getVTables().GetAddrOfVTT(RD);
    return Builder.CreateConstInBoundsGEP2_64(
        VTT->getValueType(), VTT, 0, SubVTTIndex);
  }
}

namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor final : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
        : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const CXXRecordDecl *DerivedClass =
          cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      // We are already inside a destructor, so presumably the object being
      // destroyed should have the expected type.
      QualType ThisTy = D->getThisObjectType();
      Address Addr =
          CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThisAddress(),
                                                    DerivedClass, BaseClass,
                                                    BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual,
                                /*Delegating=*/false, Addr, ThisTy);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
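  /// The check is conservative: any appearance of 'this' in the
  /// initializer (e.g. 'D() : B(computeArg(this))') causes the vptrs to
  /// be installed before the initializer is emitted.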
  struct DynamicThisUseChecker : ConstEvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef ConstEvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(const ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code? If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(const CXXThisExpr *E) { UsesThis = true; }
  };
} // end anonymous namespace

static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(Init);
  return Checker.UsesThis;
}

static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  Address ThisPtr = CGF.LoadCXXThisAddress();

  const Type *BaseType = BaseInit->getBaseClass();
  const auto *BaseClassDecl =
      cast<CXXRecordDecl>(BaseType->castAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  Address V =
      CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                                BaseClassDecl,
                                                isBaseVirtual);
  AggValueSlot AggSlot =
      AggValueSlot::forAddr(
          V, Qualifiers(),
          AggValueSlot::IsDestructed,
          AggValueSlot::DoesNotNeedGCBarriers,
          AggValueSlot::IsNotAliased,
          CGF.getOverlapForBaseInit(ClassDecl, BaseClassDecl, isBaseVirtual));

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOpts().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}

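/// Returns true if the given special member can be lowered to a plain
/// memcpy of the object representation; e.g. the defaulted copy
/// constructor of 'struct P { int x, y; };' just copies the two ints.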
static bool isMemcpyEquivalentSpecialMember(const CXXMethodDecl *D) {
  auto *CD = dyn_cast<CXXConstructorDecl>(D);
  if (!(CD && CD->isCopyOrMoveConstructor()) &&
      !D->isCopyAssignmentOperator() && !D->isMoveAssignmentOperator())
    return false;

  // We can emit a memcpy for a trivial copy or move constructor/assignment.
  if (D->isTrivial() && !D->getParent()->mayInsertExtraPadding())
    return true;

  // We *must* emit a memcpy for a defaulted union copy or move op.
  if (D->getParent()->isUnion() && D->isDefaulted())
    return true;

  return false;
}

static void EmitLValueForAnyFieldInitialization(CodeGenFunction &CGF,
                                                CXXCtorInitializer *MemberInit,
                                                LValue &LHS) {
  FieldDecl *Field = MemberInit->getAnyMember();
  if (MemberInit->isIndirectMemberInitializer()) {
    // If we are initializing an anonymous union field, drill down to the field.
    IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
    for (const auto *I : IndirectField->chain())
      LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(I));
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
  }
}

static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  ApplyDebugLocation Loc(CGF, MemberInit->getSourceLocation());
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
  LValue LHS;

  // If a base constructor is being emitted, create an LValue that has the
  // non-virtual alignment.
  if (CGF.CurGD.getCtorType() == Ctor_Base)
    LHS = CGF.MakeNaturalAlignPointeeAddrLValue(ThisPtr, RecordTy);
  else
    LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);

  EmitLValueForAnyFieldInitialization(CGF, MemberInit, LHS);

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isDefaulted() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (CE && isMemcpyEquivalentSpecialMember(CE->getConstructor()))) {
      unsigned SrcArgIndex =
          CGF.CGM.getCXXABI().getSrcArgforCopyCtor(Constructor, Args);
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS, Src, FieldType, CGF.getOverlapForFieldInit(Field),
                            LHS.isVolatileQualified());
      // Ensure that we destroy the objects if an exception is thrown later in
      // the constructor.
      QualType::DestructionKind dtorKind = FieldType.isDestructedType();
      if (CGF.needsEHCleanup(dtorKind))
        CGF.pushEHDestroy(dtorKind, LHS.getAddress(CGF), FieldType);
      return;
    }
  }

  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit());
}

void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS,
                                              Expr *Init) {
  QualType FieldType = Field->getType();
  switch (getEvaluationKind(FieldType)) {
  case TEK_Scalar:
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
    break;
  case TEK_Complex:
    EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true);
    break;
  case TEK_Aggregate: {
    AggValueSlot Slot = AggValueSlot::forLValue(
        LHS, *this, AggValueSlot::IsDestructed,
        AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased,
        getOverlapForFieldInit(Field), AggValueSlot::IsNotZeroed,
        // Checks are made by the code that calls constructor.
        AggValueSlot::IsSanitizerChecked);
    EmitAggExpr(Init, Slot);
    break;
  }
  }

  // Ensure that we destroy this object if an exception is thrown
  // later in the constructor.
  QualType::DestructionKind dtorKind = FieldType.isDestructedType();
  if (needsEHCleanup(dtorKind))
    pushEHDestroy(dtorKind, LHS.getAddress(*this), FieldType);
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
bool CodeGenFunction::IsConstructorDelegationValid(
    const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers. This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always exclude this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
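  // (For example, 'S(const char *fmt, ...)' has no portable way to
  // forward its '...' arguments to a second call.)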
  if (Ctor->getType()->castAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}

// Emit code in ctor (Prologue==true) or dtor (Prologue==false)
// to poison the extra field paddings inserted under
// -fsanitize-address-field-padding=1|2.
void CodeGenFunction::EmitAsanPrologueOrEpilogue(bool Prologue) {
  ASTContext &Context = getContext();
  const CXXRecordDecl *ClassDecl =
      Prologue ? cast<CXXConstructorDecl>(CurGD.getDecl())->getParent()
               : cast<CXXDestructorDecl>(CurGD.getDecl())->getParent();
  if (!ClassDecl->mayInsertExtraPadding()) return;

  struct SizeAndOffset {
    uint64_t Size;
    uint64_t Offset;
  };

  unsigned PtrSize = CGM.getDataLayout().getPointerSizeInBits();
  const ASTRecordLayout &Info = Context.getASTRecordLayout(ClassDecl);

  // Populate sizes and offsets of fields.
  SmallVector<SizeAndOffset, 16> SSV(Info.getFieldCount());
  for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i)
    SSV[i].Offset =
        Context.toCharUnitsFromBits(Info.getFieldOffset(i)).getQuantity();

  size_t NumFields = 0;
  for (const auto *Field : ClassDecl->fields()) {
    const FieldDecl *D = Field;
    auto FieldInfo = Context.getTypeInfoInChars(D->getType());
    CharUnits FieldSize = FieldInfo.Width;
    assert(NumFields < SSV.size());
    SSV[NumFields].Size = D->isBitField() ? 0 : FieldSize.getQuantity();
    NumFields++;
  }
  assert(NumFields == SSV.size());
  if (SSV.size() <= 1) return;

  // We will insert calls to __asan_* run-time functions.
  // LLVM AddressSanitizer pass may decide to inline them later.
  llvm::Type *Args[2] = {IntPtrTy, IntPtrTy};
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, Args, false);
  llvm::FunctionCallee F = CGM.CreateRuntimeFunction(
      FTy, Prologue ? "__asan_poison_intra_object_redzone"
                    : "__asan_unpoison_intra_object_redzone");

  llvm::Value *ThisPtr = LoadCXXThis();
  ThisPtr = Builder.CreatePtrToInt(ThisPtr, IntPtrTy);
  uint64_t TypeSize = Info.getNonVirtualSize().getQuantity();
  // For each field check if it has sufficient padding,
  // if so (un)poison it with a call.
  for (size_t i = 0; i < SSV.size(); i++) {
    uint64_t AsanAlignment = 8;
    uint64_t NextField = i == SSV.size() - 1 ? TypeSize : SSV[i + 1].Offset;
    uint64_t PoisonSize = NextField - SSV[i].Offset - SSV[i].Size;
    uint64_t EndOffset = SSV[i].Offset + SSV[i].Size;
    if (PoisonSize < AsanAlignment || !SSV[i].Size ||
        (NextField % AsanAlignment) != 0)
      continue;
    Builder.CreateCall(
        F, {Builder.CreateAdd(ThisPtr, Builder.getIntN(PtrSize, EndOffset)),
            Builder.getIntN(PtrSize, PoisonSize)});
  }
}

/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  EmitAsanPrologueOrEpilogue(true);
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  assert((CGM.getTarget().getCXXABI().hasConstructorVariants() ||
          CtorType == Ctor_Complete) &&
         "can only generate complete ctor for this ABI");

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
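  // (E.g. for a class with no virtual bases, the complete constructor,
  // C1 in Itanium mangling, can collapse to a single call to the base
  // constructor, C2, on the same 'this'.)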
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
      CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args, Ctor->getEndLoc());
    return;
  }

  const FunctionDecl *Definition = nullptr;
  Stmt *Body = Ctor->getBody(Definition);
  assert(Definition == Ctor && "emitting wrong constructor body");

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  incrementProfileCounter(Body);

  RunCleanupsScope RunCleanups(*this);

  // TODO: in restricted cases, we can emit the vbase initializers of
  // a complete ctor and then delegate to the base ctor.

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  RunCleanups.ForceCleanup();

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

namespace {
  /// RAII object to indicate that codegen is copying the value representation
  /// instead of the object representation. Useful when copying a struct or
  /// class which has uninitialized members and we're only performing
  /// lvalue-to-rvalue conversion on the object but not its members.
  class CopyingValueRepresentation {
  public:
    explicit CopyingValueRepresentation(CodeGenFunction &CGF)
        : CGF(CGF), OldSanOpts(CGF.SanOpts) {
      CGF.SanOpts.set(SanitizerKind::Bool, false);
      CGF.SanOpts.set(SanitizerKind::Enum, false);
    }
    ~CopyingValueRepresentation() {
      CGF.SanOpts = OldSanOpts;
    }
  private:
    CodeGenFunction &CGF;
    SanitizerSet OldSanOpts;
  };
} // end anonymous namespace

namespace {
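/// Base helper for coalescing runs of trivially-copyable fields into a
/// single memcpy. For example, in a defaulted copy constructor of
///   struct S { int a, b, c; std::string s; };
/// the initializers for a, b and c can be fused into one memcpy covering
/// the three ints, while s is still copy-constructed normally.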
class FieldMemcpyizer {
public:
  FieldMemcpyizer(CodeGenFunction &CGF, const CXXRecordDecl *ClassDecl,
                  const VarDecl *SrcRec)
      : CGF(CGF), ClassDecl(ClassDecl), SrcRec(SrcRec),
        RecLayout(CGF.getContext().getASTRecordLayout(ClassDecl)),
        FirstField(nullptr), LastField(nullptr), FirstFieldOffset(0),
        LastFieldOffset(0), LastAddedFieldIndex(0) {}

  bool isMemcpyableField(FieldDecl *F) const {
    // Never memcpy fields when we are adding poisoned paddings.
    if (CGF.getContext().getLangOpts().SanitizeAddressFieldPadding)
      return false;
    Qualifiers Qual = F->getType().getQualifiers();
    if (Qual.hasVolatile() || Qual.hasObjCLifetime())
      return false;
    return true;
  }

  void addMemcpyableField(FieldDecl *F) {
    if (F->isZeroSize(CGF.getContext()))
      return;
    if (!FirstField)
      addInitialField(F);
    else
      addNextField(F);
  }

  CharUnits getMemcpySize(uint64_t FirstByteOffset) const {
    ASTContext &Ctx = CGF.getContext();
    unsigned LastFieldSize =
        LastField->isBitField()
            ? LastField->getBitWidthValue(Ctx)
            : Ctx.toBits(
                  Ctx.getTypeInfoDataSizeInChars(LastField->getType()).Width);
    uint64_t MemcpySizeBits = LastFieldOffset + LastFieldSize -
                              FirstByteOffset + Ctx.getCharWidth() - 1;
    CharUnits MemcpySize = Ctx.toCharUnitsFromBits(MemcpySizeBits);
    return MemcpySize;
  }

  void emitMemcpy() {
    // Give the subclass a chance to bail out if it feels the memcpy isn't
    // worth it (e.g. hasn't aggregated enough data).
    if (!FirstField) {
      return;
    }

    uint64_t FirstByteOffset;
    if (FirstField->isBitField()) {
      const CGRecordLayout &RL =
          CGF.getTypes().getCGRecordLayout(FirstField->getParent());
      const CGBitFieldInfo &BFInfo = RL.getBitFieldInfo(FirstField);
      // FirstFieldOffset is not appropriate for bitfields,
      // we need to use the storage offset instead.
      FirstByteOffset = CGF.getContext().toBits(BFInfo.StorageOffset);
    } else {
      FirstByteOffset = FirstFieldOffset;
    }

    CharUnits MemcpySize = getMemcpySize(FirstByteOffset);
    QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
    Address ThisPtr = CGF.LoadCXXThisAddress();
    LValue DestLV = CGF.MakeAddrLValue(ThisPtr, RecordTy);
    LValue Dest = CGF.EmitLValueForFieldInitialization(DestLV, FirstField);
    llvm::Value *SrcPtr = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(SrcRec));
    LValue SrcLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
    LValue Src = CGF.EmitLValueForFieldInitialization(SrcLV, FirstField);

    emitMemcpyIR(
        Dest.isBitField() ? Dest.getBitFieldAddress() : Dest.getAddress(CGF),
        Src.isBitField() ? Src.getBitFieldAddress() : Src.getAddress(CGF),
        MemcpySize);
    reset();
  }

  void reset() {
    FirstField = nullptr;
  }

protected:
  CodeGenFunction &CGF;
  const CXXRecordDecl *ClassDecl;

private:
  void emitMemcpyIR(Address DestPtr, Address SrcPtr, CharUnits Size) {
    DestPtr = DestPtr.withElementType(CGF.Int8Ty);
    SrcPtr = SrcPtr.withElementType(CGF.Int8Ty);
    CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, Size.getQuantity());
  }

  void addInitialField(FieldDecl *F) {
    FirstField = F;
    LastField = F;
    FirstFieldOffset = RecLayout.getFieldOffset(F->getFieldIndex());
    LastFieldOffset = FirstFieldOffset;
    LastAddedFieldIndex = F->getFieldIndex();
  }

  void addNextField(FieldDecl *F) {
    // For the most part, the following invariant will hold:
    //   F->getFieldIndex() == LastAddedFieldIndex + 1
    // The one exception is that Sema won't add a copy-initializer for an
    // unnamed bitfield, which will show up here as a gap in the sequence.
    assert(F->getFieldIndex() >= LastAddedFieldIndex + 1 &&
           "Cannot aggregate fields out of order.");
    LastAddedFieldIndex = F->getFieldIndex();

    // The 'first' and 'last' fields are chosen by offset, rather than field
    // index. This allows the code to support bitfields, as well as regular
    // fields.
    uint64_t FOffset = RecLayout.getFieldOffset(F->getFieldIndex());
    if (FOffset < FirstFieldOffset) {
      FirstField = F;
      FirstFieldOffset = FOffset;
    } else if (FOffset >= LastFieldOffset) {
      LastField = F;
      LastFieldOffset = FOffset;
    }
  }

  const VarDecl *SrcRec;
  const ASTRecordLayout &RecLayout;
  FieldDecl *FirstField;
  FieldDecl *LastField;
  uint64_t FirstFieldOffset, LastFieldOffset;
  unsigned LastAddedFieldIndex;
};

class ConstructorMemcpyizer : public FieldMemcpyizer {
private:
  /// Get source argument for copy constructor. Returns null if not a copy
  /// constructor.
  static const VarDecl *getTrivialCopySource(CodeGenFunction &CGF,
                                             const CXXConstructorDecl *CD,
                                             FunctionArgList &Args) {
    if (CD->isCopyOrMoveConstructor() && CD->isDefaulted())
      return Args[CGF.CGM.getCXXABI().getSrcArgforCopyCtor(CD, Args)];
    return nullptr;
  }

  // Returns true if a CXXCtorInitializer represents a member initialization
  // that can be rolled into a memcpy.
  bool isMemberInitMemcpyable(CXXCtorInitializer *MemberInit) const {
    if (!MemcpyableCtor)
      return false;
    FieldDecl *Field = MemberInit->getMember();
    assert(Field && "No field for member init.");
    QualType FieldType = Field->getType();
    CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());

    // Bail out on non-memcpyable, not-trivially-copyable members.
    if (!(CE && isMemcpyEquivalentSpecialMember(CE->getConstructor())) &&
        !(FieldType.isTriviallyCopyableType(CGF.getContext()) ||
          FieldType->isReferenceType()))
      return false;

    // Bail out on volatile fields.
    if (!isMemcpyableField(Field))
      return false;

    // Otherwise we're good.
    return true;
  }

public:
  ConstructorMemcpyizer(CodeGenFunction &CGF, const CXXConstructorDecl *CD,
                        FunctionArgList &Args)
      : FieldMemcpyizer(CGF, CD->getParent(), getTrivialCopySource(CGF, CD, Args)),
        ConstructorDecl(CD),
        MemcpyableCtor(CD->isDefaulted() &&
                       CD->isCopyOrMoveConstructor() &&
                       CGF.getLangOpts().getGC() == LangOptions::NonGC),
        Args(Args) { }

  void addMemberInitializer(CXXCtorInitializer *MemberInit) {
    if (isMemberInitMemcpyable(MemberInit)) {
      AggregatedInits.push_back(MemberInit);
      addMemcpyableField(MemberInit->getMember());
    } else {
      emitAggregatedInits();
      EmitMemberInitializer(CGF, ConstructorDecl->getParent(), MemberInit,
                            ConstructorDecl, Args);
    }
  }

  void emitAggregatedInits() {
    if (AggregatedInits.size() <= 1) {
      // This memcpy is too small to be worthwhile. Fall back on default
      // codegen.
      if (!AggregatedInits.empty()) {
        CopyingValueRepresentation CVR(CGF);
        EmitMemberInitializer(CGF, ConstructorDecl->getParent(),
                              AggregatedInits[0], ConstructorDecl, Args);
        AggregatedInits.clear();
      }
      reset();
      return;
    }

    pushEHDestructors();
    emitMemcpy();
    AggregatedInits.clear();
  }

  void pushEHDestructors() {
    Address ThisPtr = CGF.LoadCXXThisAddress();
    QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
    LValue LHS = CGF.MakeAddrLValue(ThisPtr, RecordTy);

    for (unsigned i = 0; i < AggregatedInits.size(); ++i) {
      CXXCtorInitializer *MemberInit = AggregatedInits[i];
      QualType FieldType = MemberInit->getAnyMember()->getType();
      QualType::DestructionKind dtorKind = FieldType.isDestructedType();
      if (!CGF.needsEHCleanup(dtorKind))
        continue;
      LValue FieldLHS = LHS;
      EmitLValueForAnyFieldInitialization(CGF, MemberInit, FieldLHS);
      CGF.pushEHDestroy(dtorKind, FieldLHS.getAddress(CGF), FieldType);
    }
  }

  void finish() {
    emitAggregatedInits();
  }

private:
  const CXXConstructorDecl *ConstructorDecl;
  bool MemcpyableCtor;
  FunctionArgList &Args;
  SmallVector<CXXCtorInitializer*, 16> AggregatedInits;
};

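/// Performs the same coalescing for implicit assignment operators: the
/// synthesized body of, e.g., 'S &operator=(const S &) = default;' is a
/// sequence of member-wise assignments, and runs of trivially-assignable
/// members are recognized below and combined into memcpys.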
class AssignmentMemcpyizer : public FieldMemcpyizer {
private:
  // Returns the memcpyable field copied by the given statement, if one
  // exists. Otherwise returns null.
  FieldDecl *getMemcpyableField(Stmt *S) {
    if (!AssignmentsMemcpyable)
      return nullptr;
    if (BinaryOperator *BO = dyn_cast<BinaryOperator>(S)) {
      // Recognise trivial assignments.
      if (BO->getOpcode() != BO_Assign)
        return nullptr;
      MemberExpr *ME = dyn_cast<MemberExpr>(BO->getLHS());
      if (!ME)
        return nullptr;
      FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
      if (!Field || !isMemcpyableField(Field))
        return nullptr;
      Stmt *RHS = BO->getRHS();
      if (ImplicitCastExpr *EC = dyn_cast<ImplicitCastExpr>(RHS))
        RHS = EC->getSubExpr();
      if (!RHS)
        return nullptr;
      if (MemberExpr *ME2 = dyn_cast<MemberExpr>(RHS)) {
        if (ME2->getMemberDecl() == Field)
          return Field;
      }
      return nullptr;
    } else if (CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(S)) {
      CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MCE->getCalleeDecl());
      if (!(MD && isMemcpyEquivalentSpecialMember(MD)))
        return nullptr;
      MemberExpr *IOA = dyn_cast<MemberExpr>(MCE->getImplicitObjectArgument());
      if (!IOA)
        return nullptr;
      FieldDecl *Field = dyn_cast<FieldDecl>(IOA->getMemberDecl());
      if (!Field || !isMemcpyableField(Field))
        return nullptr;
      MemberExpr *Arg0 = dyn_cast<MemberExpr>(MCE->getArg(0));
      if (!Arg0 || Field != dyn_cast<FieldDecl>(Arg0->getMemberDecl()))
        return nullptr;
      return Field;
    } else if (CallExpr *CE = dyn_cast<CallExpr>(S)) {
      FunctionDecl *FD = dyn_cast<FunctionDecl>(CE->getCalleeDecl());
      if (!FD || FD->getBuiltinID() != Builtin::BI__builtin_memcpy)
        return nullptr;
      Expr *DstPtr = CE->getArg(0);
      if (ImplicitCastExpr *DC = dyn_cast<ImplicitCastExpr>(DstPtr))
        DstPtr = DC->getSubExpr();
      UnaryOperator *DUO = dyn_cast<UnaryOperator>(DstPtr);
      if (!DUO || DUO->getOpcode() != UO_AddrOf)
        return nullptr;
      MemberExpr *ME = dyn_cast<MemberExpr>(DUO->getSubExpr());
      if (!ME)
        return nullptr;
      FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
      if (!Field || !isMemcpyableField(Field))
        return nullptr;
      Expr *SrcPtr = CE->getArg(1);
      if (ImplicitCastExpr *SC = dyn_cast<ImplicitCastExpr>(SrcPtr))
        SrcPtr = SC->getSubExpr();
      UnaryOperator *SUO = dyn_cast<UnaryOperator>(SrcPtr);
      if (!SUO || SUO->getOpcode() != UO_AddrOf)
        return nullptr;
      MemberExpr *ME2 = dyn_cast<MemberExpr>(SUO->getSubExpr());
      if (!ME2 || Field != dyn_cast<FieldDecl>(ME2->getMemberDecl()))
        return nullptr;
      return Field;
    }

    return nullptr;
  }

  bool AssignmentsMemcpyable;
  SmallVector<Stmt*, 16> AggregatedStmts;

public:
  AssignmentMemcpyizer(CodeGenFunction &CGF, const CXXMethodDecl *AD,
                       FunctionArgList &Args)
      : FieldMemcpyizer(CGF, AD->getParent(), Args[Args.size() - 1]),
        AssignmentsMemcpyable(CGF.getLangOpts().getGC() == LangOptions::NonGC) {
    assert(Args.size() == 2);
  }

  void emitAssignment(Stmt *S) {
    FieldDecl *F = getMemcpyableField(S);
    if (F) {
      addMemcpyableField(F);
      AggregatedStmts.push_back(S);
    } else {
      emitAggregatedStmts();
      CGF.EmitStmt(S);
    }
  }

  void emitAggregatedStmts() {
    if (AggregatedStmts.size() <= 1) {
      if (!AggregatedStmts.empty()) {
        CopyingValueRepresentation CVR(CGF);
        CGF.EmitStmt(AggregatedStmts[0]);
      }
      reset();
    }

    emitMemcpy();
    AggregatedStmts.clear();
  }

  void finish() {
    emitAggregatedStmts();
  }
};
} // end anonymous namespace

static bool isInitializerOfDynamicClass(const CXXCtorInitializer *BaseInit) {
  const Type *BaseType = BaseInit->getBaseClass();
  const auto *BaseClassDecl =
      cast<CXXRecordDecl>(BaseType->castAs<RecordType>()->getDecl());
  return BaseClassDecl->isDynamicClass();
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
                                          E = CD->init_end();

  // Virtual base initializers first, if any. They aren't needed if:
  // - This is a base ctor variant
  // - There are no vbases
  // - The class is abstract, so a complete object of it cannot be constructed
  //
  // The check for an abstract class is necessary because sema may not have
  // marked virtual base destructors referenced.
  bool ConstructVBases = CtorType != Ctor_Base &&
                         ClassDecl->getNumVBases() != 0 &&
                         !ClassDecl->isAbstract();

  // In the Microsoft C++ ABI, there are no constructor variants. Instead, the
  // constructor of a class with virtual bases takes an additional parameter to
  // conditionally construct the virtual bases. Emit that check here.
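  // (This is the implicit 'is_most_derived' flag: 'new C()' passes a
  // nonzero value so the constructor initializes C's virtual bases, while
  // the same constructor invoked for a base subobject receives zero and
  // skips them.)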
  llvm::BasicBlock *BaseCtorContinueBB = nullptr;
  if (ConstructVBases &&
      !CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    BaseCtorContinueBB =
        CGM.getCXXABI().EmitCtorCompleteObjectHandler(*this, ClassDecl);
    assert(BaseCtorContinueBB);
  }

  llvm::Value *const OldThis = CXXThisValue;
  for (; B != E && (*B)->isBaseInitializer() && (*B)->isBaseVirtual(); B++) {
    if (!ConstructVBases)
      continue;
    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        isInitializerOfDynamicClass(*B))
      CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
    EmitBaseInitializer(*this, ClassDecl, *B);
  }

  if (BaseCtorContinueBB) {
    // Complete object handler should continue to the remaining initializers.
    Builder.CreateBr(BaseCtorContinueBB);
    EmitBlock(BaseCtorContinueBB);
  }

  // Then, non-virtual base initializers.
  for (; B != E && (*B)->isBaseInitializer(); B++) {
    assert(!(*B)->isBaseVirtual());

    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        isInitializerOfDynamicClass(*B))
      CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
    EmitBaseInitializer(*this, ClassDecl, *B);
  }

  CXXThisValue = OldThis;

  InitializeVTablePointers(ClassDecl);

  // And finally, initialize class members.
  FieldConstructionScope FCS(*this, LoadCXXThisAddress());
  ConstructorMemcpyizer CM(*this, CD, Args);
  for (; B != E; B++) {
    CXXCtorInitializer *Member = (*B);
    assert(!Member->isBaseInitializer());
    assert(Member->isAnyMemberInitializer() &&
           "Delegating initializer on non-delegating constructor");
    CM.addMemberInitializer(Member);
  }
  CM.finish();
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

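// Returns true if the destructor of BaseClassDecl, together with the
// destructors of all of its fields and (for the most-derived class)
// virtual bases, is known to have no effect.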
static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (const auto *Field : BaseClassDecl->fields())
    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;

  // Check non-virtual bases.
  for (const auto &I : BaseClassDecl->bases()) {
    if (I.isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
        cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (const auto &I : BaseClassDecl->vbases()) {
      const CXXRecordDecl *VirtualBase =
          cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());

  // The destructor for an implicit anonymous union member is never invoked.
  if (FieldClassDecl->isUnion() && FieldClassDecl->isAnonymousStructOrUnion())
    return false;

  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}

/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
static bool CanSkipVTablePointerInitialization(CodeGenFunction &CGF,
                                               const CXXDestructorDecl *Dtor) {
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  if (!ClassDecl->isDynamicClass())
    return true;

  // For a final class, the vtable pointer is known to already point to the
  // class's vtable.
  if (ClassDecl->isEffectivelyFinal())
    return true;

  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  for (const auto *Field : ClassDecl->fields())
    if (!FieldHasTrivialDestructorBody(CGF.getContext(), Field))
      return false;

  return true;
}

/// EmitDestructorBody - Emits the body of the current destructor.
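/// (In the Itanium ABI, the deleting, complete and base variants handled
/// below correspond to the D0, D1 and D2 entry points respectively.)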
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  // For an abstract class, non-base destructors are never used (and can't
  // be emitted in general, because vbase dtors may not have been validated
  // by Sema), but the Itanium ABI doesn't make them optional and Clang may
  // in fact emit references to them from other compilations, so emit them
  // as functions containing a trap instruction.
  if (DtorType != Dtor_Base && Dtor->getParent()->isAbstract()) {
    llvm::CallInst *TrapCall = EmitTrapCall(llvm::Intrinsic::trap);
    TrapCall->setDoesNotReturn();
    TrapCall->setDoesNotThrow();
    Builder.CreateUnreachable();
    Builder.ClearInsertionPoint();
    return;
  }

  Stmt *Body = Dtor->getBody();
  if (Body)
    incrementProfileCounter(Body);

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor. Do so.
  if (DtorType == Dtor_Deleting) {
    RunCleanupsScope DtorEpilogue(*this);
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    if (HaveInsertPoint()) {
      QualType ThisTy = Dtor->getThisObjectType();
      EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                            /*Delegating=*/false, LoadCXXThisAddress(), ThisTy);
    }
    return;
  }

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
  EmitAsanPrologueOrEpilogue(false);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases. But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks. In the Microsoft ABI, we
  // always delegate because we might not have a definition in this TU.
  switch (DtorType) {
  case Dtor_Comdat: llvm_unreachable("not expecting a COMDAT");
  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    assert((Body || getTarget().getCXXABI().isMicrosoft()) &&
           "can't emit a dtor without a body for non-Microsoft ABIs");

    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody) {
      QualType ThisTy = Dtor->getThisObjectType();
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            /*Delegating=*/false, LoadCXXThisAddress(), ThisTy);
      break;
    }

    // Fallthrough: act like we're in the base variant.
    [[fallthrough]];

  case Dtor_Base:
    assert(Body);

    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    if (!CanSkipVTablePointerInitialization(*this, Dtor)) {
      // Insert the llvm.launder.invariant.group intrinsic before initializing
      // the vptrs to cancel any previous assumptions we might have made.
      if (CGM.getCodeGenOpts().StrictVTablePointers &&
          CGM.getCodeGenOpts().OptimizationLevel > 0)
        CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
      InitializeVTablePointers(Dtor->getParent());
    }

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    // -fapple-kext must inline any call to this dtor into
    // the caller's body.
    if (getLangOpts().AppleKext)
      CurFn->addFnAttr(llvm::Attribute::AlwaysInline);

    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
1538 if (isTryBody) 1539 ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true); 1540 } 1541 1542 void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args) { 1543 const CXXMethodDecl *AssignOp = cast<CXXMethodDecl>(CurGD.getDecl()); 1544 const Stmt *RootS = AssignOp->getBody(); 1545 assert(isa<CompoundStmt>(RootS) && 1546 "Body of an implicit assignment operator should be compound stmt."); 1547 const CompoundStmt *RootCS = cast<CompoundStmt>(RootS); 1548 1549 LexicalScope Scope(*this, RootCS->getSourceRange()); 1550 1551 incrementProfileCounter(RootCS); 1552 AssignmentMemcpyizer AM(*this, AssignOp, Args); 1553 for (auto *I : RootCS->body()) 1554 AM.emitAssignment(I); 1555 AM.finish(); 1556 } 1557 1558 namespace { 1559 llvm::Value *LoadThisForDtorDelete(CodeGenFunction &CGF, 1560 const CXXDestructorDecl *DD) { 1561 if (Expr *ThisArg = DD->getOperatorDeleteThisArg()) 1562 return CGF.EmitScalarExpr(ThisArg); 1563 return CGF.LoadCXXThis(); 1564 } 1565 1566 /// Call the operator delete associated with the current destructor. 1567 struct CallDtorDelete final : EHScopeStack::Cleanup { 1568 CallDtorDelete() {} 1569 1570 void Emit(CodeGenFunction &CGF, Flags flags) override { 1571 const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl); 1572 const CXXRecordDecl *ClassDecl = Dtor->getParent(); 1573 CGF.EmitDeleteCall(Dtor->getOperatorDelete(), 1574 LoadThisForDtorDelete(CGF, Dtor), 1575 CGF.getContext().getTagDeclType(ClassDecl)); 1576 } 1577 }; 1578 1579 void EmitConditionalDtorDeleteCall(CodeGenFunction &CGF, 1580 llvm::Value *ShouldDeleteCondition, 1581 bool ReturnAfterDelete) { 1582 llvm::BasicBlock *callDeleteBB = CGF.createBasicBlock("dtor.call_delete"); 1583 llvm::BasicBlock *continueBB = CGF.createBasicBlock("dtor.continue"); 1584 llvm::Value *ShouldCallDelete 1585 = CGF.Builder.CreateIsNull(ShouldDeleteCondition); 1586 CGF.Builder.CreateCondBr(ShouldCallDelete, continueBB, callDeleteBB); 1587 1588 CGF.EmitBlock(callDeleteBB); 1589 const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl); 1590 const CXXRecordDecl *ClassDecl = Dtor->getParent(); 1591 CGF.EmitDeleteCall(Dtor->getOperatorDelete(), 1592 LoadThisForDtorDelete(CGF, Dtor), 1593 CGF.getContext().getTagDeclType(ClassDecl)); 1594 assert(Dtor->getOperatorDelete()->isDestroyingOperatorDelete() == 1595 ReturnAfterDelete && 1596 "unexpected value for ReturnAfterDelete"); 1597 if (ReturnAfterDelete) 1598 CGF.EmitBranchThroughCleanup(CGF.ReturnBlock); 1599 else 1600 CGF.Builder.CreateBr(continueBB); 1601 1602 CGF.EmitBlock(continueBB); 1603 } 1604 1605 struct CallDtorDeleteConditional final : EHScopeStack::Cleanup { 1606 llvm::Value *ShouldDeleteCondition; 1607 1608 public: 1609 CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition) 1610 : ShouldDeleteCondition(ShouldDeleteCondition) { 1611 assert(ShouldDeleteCondition != nullptr); 1612 } 1613 1614 void Emit(CodeGenFunction &CGF, Flags flags) override { 1615 EmitConditionalDtorDeleteCall(CGF, ShouldDeleteCondition, 1616 /*ReturnAfterDelete*/false); 1617 } 1618 }; 1619 1620 class DestroyField final : public EHScopeStack::Cleanup { 1621 const FieldDecl *field; 1622 CodeGenFunction::Destroyer *destroyer; 1623 bool useEHCleanupForArray; 1624 1625 public: 1626 DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer, 1627 bool useEHCleanupForArray) 1628 : field(field), destroyer(destroyer), 1629 useEHCleanupForArray(useEHCleanupForArray) {} 1630 1631 void Emit(CodeGenFunction &CGF, Flags flags) override { 1632 // Find the 
address of the field. 1633 Address thisValue = CGF.LoadCXXThisAddress(); 1634 QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent()); 1635 LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy); 1636 LValue LV = CGF.EmitLValueForField(ThisLV, field); 1637 assert(LV.isSimple()); 1638 1639 CGF.emitDestroy(LV.getAddress(CGF), field->getType(), destroyer, 1640 flags.isForNormalCleanup() && useEHCleanupForArray); 1641 } 1642 }; 1643 1644 class DeclAsInlineDebugLocation { 1645 CGDebugInfo *DI; 1646 llvm::MDNode *InlinedAt; 1647 std::optional<ApplyDebugLocation> Location; 1648 1649 public: 1650 DeclAsInlineDebugLocation(CodeGenFunction &CGF, const NamedDecl &Decl) 1651 : DI(CGF.getDebugInfo()) { 1652 if (!DI) 1653 return; 1654 InlinedAt = DI->getInlinedAt(); 1655 DI->setInlinedAt(CGF.Builder.getCurrentDebugLocation()); 1656 Location.emplace(CGF, Decl.getLocation()); 1657 } 1658 1659 ~DeclAsInlineDebugLocation() { 1660 if (!DI) 1661 return; 1662 Location.reset(); 1663 DI->setInlinedAt(InlinedAt); 1664 } 1665 }; 1666 1667 static void EmitSanitizerDtorCallback( 1668 CodeGenFunction &CGF, StringRef Name, llvm::Value *Ptr, 1669 std::optional<CharUnits::QuantityType> PoisonSize = {}) { 1670 CodeGenFunction::SanitizerScope SanScope(&CGF); 1671 // Pass in void pointer and size of region as arguments to runtime 1672 // function 1673 SmallVector<llvm::Value *, 2> Args = {Ptr}; 1674 SmallVector<llvm::Type *, 2> ArgTypes = {CGF.VoidPtrTy}; 1675 1676 if (PoisonSize.has_value()) { 1677 Args.emplace_back(llvm::ConstantInt::get(CGF.SizeTy, *PoisonSize)); 1678 ArgTypes.emplace_back(CGF.SizeTy); 1679 } 1680 1681 llvm::FunctionType *FnType = 1682 llvm::FunctionType::get(CGF.VoidTy, ArgTypes, false); 1683 llvm::FunctionCallee Fn = CGF.CGM.CreateRuntimeFunction(FnType, Name); 1684 1685 CGF.EmitNounwindRuntimeCall(Fn, Args); 1686 } 1687 1688 static void 1689 EmitSanitizerDtorFieldsCallback(CodeGenFunction &CGF, llvm::Value *Ptr, 1690 CharUnits::QuantityType PoisonSize) { 1691 EmitSanitizerDtorCallback(CGF, "__sanitizer_dtor_callback_fields", Ptr, 1692 PoisonSize); 1693 } 1694 1695 /// Poison base class with a trivial destructor. 1696 struct SanitizeDtorTrivialBase final : EHScopeStack::Cleanup { 1697 const CXXRecordDecl *BaseClass; 1698 bool BaseIsVirtual; 1699 SanitizeDtorTrivialBase(const CXXRecordDecl *Base, bool BaseIsVirtual) 1700 : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {} 1701 1702 void Emit(CodeGenFunction &CGF, Flags flags) override { 1703 const CXXRecordDecl *DerivedClass = 1704 cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent(); 1705 1706 Address Addr = CGF.GetAddressOfDirectBaseInCompleteClass( 1707 CGF.LoadCXXThisAddress(), DerivedClass, BaseClass, BaseIsVirtual); 1708 1709 const ASTRecordLayout &BaseLayout = 1710 CGF.getContext().getASTRecordLayout(BaseClass); 1711 CharUnits BaseSize = BaseLayout.getSize(); 1712 1713 if (!BaseSize.isPositive()) 1714 return; 1715 1716 // Use the base class declaration location as inline DebugLocation. All 1717 // fields of the class are destroyed. 1718 DeclAsInlineDebugLocation InlineHere(CGF, *BaseClass); 1719 EmitSanitizerDtorFieldsCallback(CGF, Addr.getPointer(), 1720 BaseSize.getQuantity()); 1721 1722 // Prevent the current stack frame from disappearing from the stack trace. 
1723 CGF.CurFn->addFnAttr("disable-tail-calls", "true"); 1724 } 1725 }; 1726 1727 class SanitizeDtorFieldRange final : public EHScopeStack::Cleanup { 1728 const CXXDestructorDecl *Dtor; 1729 unsigned StartIndex; 1730 unsigned EndIndex; 1731 1732 public: 1733 SanitizeDtorFieldRange(const CXXDestructorDecl *Dtor, unsigned StartIndex, 1734 unsigned EndIndex) 1735 : Dtor(Dtor), StartIndex(StartIndex), EndIndex(EndIndex) {} 1736 1737 // Generate function call for handling object poisoning. 1738 // Disables tail call elimination, to prevent the current stack frame 1739 // from disappearing from the stack trace. 1740 void Emit(CodeGenFunction &CGF, Flags flags) override { 1741 const ASTContext &Context = CGF.getContext(); 1742 const ASTRecordLayout &Layout = 1743 Context.getASTRecordLayout(Dtor->getParent()); 1744 1745 // This is the first trivial field, so it should start on a char boundary; 1746 // still, round the start offset up to a whole char just in case. 1747 CharUnits PoisonStart = Context.toCharUnitsFromBits( 1748 Layout.getFieldOffset(StartIndex) + Context.getCharWidth() - 1); 1749 llvm::ConstantInt *OffsetSizePtr = 1750 llvm::ConstantInt::get(CGF.SizeTy, PoisonStart.getQuantity()); 1751 1752 llvm::Value *OffsetPtr = 1753 CGF.Builder.CreateGEP(CGF.Int8Ty, CGF.LoadCXXThis(), OffsetSizePtr); 1754 1755 CharUnits PoisonEnd; 1756 if (EndIndex >= Layout.getFieldCount()) { 1757 PoisonEnd = Layout.getNonVirtualSize(); 1758 } else { 1759 PoisonEnd = 1760 Context.toCharUnitsFromBits(Layout.getFieldOffset(EndIndex)); 1761 } 1762 CharUnits PoisonSize = PoisonEnd - PoisonStart; 1763 if (!PoisonSize.isPositive()) 1764 return; 1765 1766 // Use the top field declaration location as inline DebugLocation. 1767 DeclAsInlineDebugLocation InlineHere( 1768 CGF, **std::next(Dtor->getParent()->field_begin(), StartIndex)); 1769 EmitSanitizerDtorFieldsCallback(CGF, OffsetPtr, PoisonSize.getQuantity()); 1770 1771 // Prevent the current stack frame from disappearing from the stack trace. 1772 CGF.CurFn->addFnAttr("disable-tail-calls", "true"); 1773 } 1774 }; 1775 1776 class SanitizeDtorVTable final : public EHScopeStack::Cleanup { 1777 const CXXDestructorDecl *Dtor; 1778 1779 public: 1780 SanitizeDtorVTable(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {} 1781 1782 // Generate function call for handling vtable pointer poisoning. 1783 void Emit(CodeGenFunction &CGF, Flags flags) override { 1784 assert(Dtor->getParent()->isDynamicClass()); 1785 (void)Dtor; 1786 // Poison vtable and vtable ptr if they exist for this class.
1787 llvm::Value *VTablePtr = CGF.LoadCXXThis(); 1788 1789 // Pass in void pointer and size of region as arguments to runtime 1790 // function 1791 EmitSanitizerDtorCallback(CGF, "__sanitizer_dtor_callback_vptr", 1792 VTablePtr); 1793 } 1794 }; 1795 1796 class SanitizeDtorCleanupBuilder { 1797 ASTContext &Context; 1798 EHScopeStack &EHStack; 1799 const CXXDestructorDecl *DD; 1800 std::optional<unsigned> StartIndex; 1801 1802 public: 1803 SanitizeDtorCleanupBuilder(ASTContext &Context, EHScopeStack &EHStack, 1804 const CXXDestructorDecl *DD) 1805 : Context(Context), EHStack(EHStack), DD(DD), StartIndex(std::nullopt) {} 1806 void PushCleanupForField(const FieldDecl *Field) { 1807 if (Field->isZeroSize(Context)) 1808 return; 1809 unsigned FieldIndex = Field->getFieldIndex(); 1810 if (FieldHasTrivialDestructorBody(Context, Field)) { 1811 if (!StartIndex) 1812 StartIndex = FieldIndex; 1813 } else if (StartIndex) { 1814 EHStack.pushCleanup<SanitizeDtorFieldRange>(NormalAndEHCleanup, DD, 1815 *StartIndex, FieldIndex); 1816 StartIndex = std::nullopt; 1817 } 1818 } 1819 void End() { 1820 if (StartIndex) 1821 EHStack.pushCleanup<SanitizeDtorFieldRange>(NormalAndEHCleanup, DD, 1822 *StartIndex, -1); 1823 } 1824 }; 1825 } // end anonymous namespace 1826 1827 /// Emit all code that comes at the end of a class's 1828 /// destructor. This calls destructors on members and base classes 1829 /// in reverse order of their construction. 1830 /// 1831 /// For a deleting destructor, this also handles the case where a destroying 1832 /// operator delete completely overrides the definition. 1833 void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD, 1834 CXXDtorType DtorType) { 1835 assert((!DD->isTrivial() || DD->hasAttr<DLLExportAttr>()) && 1836 "Should not emit dtor epilogue for non-exported trivial dtor!"); 1837 1838 // The deleting-destructor phase just needs to call the appropriate 1839 // operator delete that Sema picked up. 1840 if (DtorType == Dtor_Deleting) { 1841 assert(DD->getOperatorDelete() && 1842 "operator delete missing - EnterDtorCleanups"); 1843 if (CXXStructorImplicitParamValue) { 1844 // If there is an implicit param to the deleting dtor, it's a boolean 1845 // telling whether this is a deleting destructor. 1846 if (DD->getOperatorDelete()->isDestroyingOperatorDelete()) 1847 EmitConditionalDtorDeleteCall(*this, CXXStructorImplicitParamValue, 1848 /*ReturnAfterDelete*/true); 1849 else 1850 EHStack.pushCleanup<CallDtorDeleteConditional>( 1851 NormalAndEHCleanup, CXXStructorImplicitParamValue); 1852 } else { 1853 if (DD->getOperatorDelete()->isDestroyingOperatorDelete()) { 1854 const CXXRecordDecl *ClassDecl = DD->getParent(); 1855 EmitDeleteCall(DD->getOperatorDelete(), 1856 LoadThisForDtorDelete(*this, DD), 1857 getContext().getTagDeclType(ClassDecl)); 1858 EmitBranchThroughCleanup(ReturnBlock); 1859 } else { 1860 EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup); 1861 } 1862 } 1863 return; 1864 } 1865 1866 const CXXRecordDecl *ClassDecl = DD->getParent(); 1867 1868 // Unions have no bases and do not call field destructors. 1869 if (ClassDecl->isUnion()) 1870 return; 1871 1872 // The complete-destructor phase just destructs all the virtual bases. 1873 if (DtorType == Dtor_Complete) { 1874 // Poison the vtable pointer so that any access after the base 1875 // and member destructors have run is invalid.
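// (Editorial illustration, not from the source: the check below matches a
// class such as
//
//   struct A { virtual ~A(); };
//   struct B : virtual A { ~B(); };  // polymorphic, has a virtual base
//
// where the complete destructor still has the virtual base A to destroy,
// so the vptr is poisoned in this phase rather than in the base variant.)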
1876 if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor && 1877 SanOpts.has(SanitizerKind::Memory) && ClassDecl->getNumVBases() && 1878 ClassDecl->isPolymorphic()) 1879 EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD); 1880 1881 // We push them in the forward order so that they'll be popped in 1882 // the reverse order. 1883 for (const auto &Base : ClassDecl->vbases()) { 1884 auto *BaseClassDecl = 1885 cast<CXXRecordDecl>(Base.getType()->castAs<RecordType>()->getDecl()); 1886 1887 if (BaseClassDecl->hasTrivialDestructor()) { 1888 // Under SanitizeMemoryUseAfterDtor, poison the trivial base class 1889 // memory. For non-trivial base classes the same is done in the class 1890 // destructor. 1891 if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor && 1892 SanOpts.has(SanitizerKind::Memory) && !BaseClassDecl->isEmpty()) 1893 EHStack.pushCleanup<SanitizeDtorTrivialBase>(NormalAndEHCleanup, 1894 BaseClassDecl, 1895 /*BaseIsVirtual*/ true); 1896 } else { 1897 EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup, BaseClassDecl, 1898 /*BaseIsVirtual*/ true); 1899 } 1900 } 1901 1902 return; 1903 } 1904 1905 assert(DtorType == Dtor_Base); 1906 // Poison the vtable pointer if the class has no virtual bases but 1907 // inherits virtual functions. 1908 if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor && 1909 SanOpts.has(SanitizerKind::Memory) && !ClassDecl->getNumVBases() && 1910 ClassDecl->isPolymorphic()) 1911 EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD); 1912 1913 // Destroy non-virtual bases. 1914 for (const auto &Base : ClassDecl->bases()) { 1915 // Ignore virtual bases. 1916 if (Base.isVirtual()) 1917 continue; 1918 1919 CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl(); 1920 1921 if (BaseClassDecl->hasTrivialDestructor()) { 1922 if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor && 1923 SanOpts.has(SanitizerKind::Memory) && !BaseClassDecl->isEmpty()) 1924 EHStack.pushCleanup<SanitizeDtorTrivialBase>(NormalAndEHCleanup, 1925 BaseClassDecl, 1926 /*BaseIsVirtual*/ false); 1927 } else { 1928 EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup, BaseClassDecl, 1929 /*BaseIsVirtual*/ false); 1930 } 1931 } 1932 1933 // Poison fields such that access after their destructors are 1934 // invoked, and before the base class destructor runs, is invalid. 1935 bool SanitizeFields = CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor && 1936 SanOpts.has(SanitizerKind::Memory); 1937 SanitizeDtorCleanupBuilder SanitizeBuilder(getContext(), EHStack, DD); 1938 1939 // Destroy direct fields. 1940 for (const auto *Field : ClassDecl->fields()) { 1941 if (SanitizeFields) 1942 SanitizeBuilder.PushCleanupForField(Field); 1943 1944 QualType type = Field->getType(); 1945 QualType::DestructionKind dtorKind = type.isDestructedType(); 1946 if (!dtorKind) 1947 continue; 1948 1949 // Anonymous union members do not have their destructors called. 1950 const RecordType *RT = type->getAsUnionType(); 1951 if (RT && RT->getDecl()->isAnonymousStructOrUnion()) 1952 continue; 1953 1954 CleanupKind cleanupKind = getCleanupKind(dtorKind); 1955 EHStack.pushCleanup<DestroyField>( 1956 cleanupKind, Field, getDestroyer(dtorKind), cleanupKind & EHCleanup); 1957 } 1958 1959 if (SanitizeFields) 1960 SanitizeBuilder.End(); 1961 } 1962 1963 /// EmitCXXAggrConstructorCall - Emit a loop to call a particular 1964 /// constructor for each of several members of an array.
1965 /// 1966 /// \param ctor the constructor to call for each element 1967 /// \param arrayType the type of the array to initialize 1968 /// \param arrayBegin an arrayType* 1969 /// \param zeroInitialize true if each element should be 1970 /// zero-initialized before it is constructed 1971 void CodeGenFunction::EmitCXXAggrConstructorCall( 1972 const CXXConstructorDecl *ctor, const ArrayType *arrayType, 1973 Address arrayBegin, const CXXConstructExpr *E, bool NewPointerIsChecked, 1974 bool zeroInitialize) { 1975 QualType elementType; 1976 llvm::Value *numElements = 1977 emitArrayLength(arrayType, elementType, arrayBegin); 1978 1979 EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin, E, 1980 NewPointerIsChecked, zeroInitialize); 1981 } 1982 1983 /// EmitCXXAggrConstructorCall - Emit a loop to call a particular 1984 /// constructor for each of several members of an array. 1985 /// 1986 /// \param ctor the constructor to call for each element 1987 /// \param numElements the number of elements in the array; 1988 /// may be zero 1989 /// \param arrayBase a T*, where T is the type constructed by ctor 1990 /// \param zeroInitialize true if each element should be 1991 /// zero-initialized before it is constructed 1992 void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor, 1993 llvm::Value *numElements, 1994 Address arrayBase, 1995 const CXXConstructExpr *E, 1996 bool NewPointerIsChecked, 1997 bool zeroInitialize) { 1998 // It's legal for numElements to be zero. This can happen both 1999 // dynamically, because x can be zero in 'new A[x]', and statically, 2000 // because of GCC extensions that permit zero-length arrays. There 2001 // are probably legitimate places where we could assume that this 2002 // doesn't happen, but it's not clear that it's worth it. 2003 llvm::BranchInst *zeroCheckBranch = nullptr; 2004 2005 // Optimize for a constant count. 2006 llvm::ConstantInt *constantCount 2007 = dyn_cast<llvm::ConstantInt>(numElements); 2008 if (constantCount) { 2009 // Just skip out if the constant count is zero. 2010 if (constantCount->isZero()) return; 2011 2012 // Otherwise, emit the check. 2013 } else { 2014 llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop"); 2015 llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty"); 2016 zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB); 2017 EmitBlock(loopBB); 2018 } 2019 2020 // Find the end of the array. 2021 llvm::Type *elementType = arrayBase.getElementType(); 2022 llvm::Value *arrayBegin = arrayBase.getPointer(); 2023 llvm::Value *arrayEnd = Builder.CreateInBoundsGEP( 2024 elementType, arrayBegin, numElements, "arrayctor.end"); 2025 2026 // Enter the loop, setting up a phi for the current location to initialize. 2027 llvm::BasicBlock *entryBB = Builder.GetInsertBlock(); 2028 llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop"); 2029 EmitBlock(loopBB); 2030 llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2, 2031 "arrayctor.cur"); 2032 cur->addIncoming(arrayBegin, entryBB); 2033 2034 // Inside the loop body, emit the constructor call on the array element. 2035 2036 // The alignment of the base, adjusted by the size of a single element, 2037 // provides a conservative estimate of the alignment of every element. 2038 // (This assumes we never start tracking offsetted alignments.) 2039 // 2040 // Note that these are complete objects and so we don't need to 2041 // use the non-virtual size or alignment. 
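// (Editorial worked example, assuming the usual behavior of
// CharUnits::alignmentOfArrayElement: with a 16-byte-aligned array base
// and a 12-byte element size, element i sits at offset 12 * i, so only
// min(16, 4) = 4-byte alignment can be guaranteed for every element, 4
// being the largest power of two dividing 12.)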
2042 QualType type = getContext().getTypeDeclType(ctor->getParent()); 2043 CharUnits eltAlignment = 2044 arrayBase.getAlignment() 2045 .alignmentOfArrayElement(getContext().getTypeSizeInChars(type)); 2046 Address curAddr = Address(cur, elementType, eltAlignment); 2047 2048 // Zero initialize the storage, if requested. 2049 if (zeroInitialize) 2050 EmitNullInitialization(curAddr, type); 2051 2052 // C++ [class.temporary]p4: 2053 // There are two contexts in which temporaries are destroyed at a different 2054 // point than the end of the full-expression. The first context is when a 2055 // default constructor is called to initialize an element of an array. 2056 // If the constructor has one or more default arguments, the destruction of 2057 // every temporary created in a default argument expression is sequenced 2058 // before the construction of the next array element, if any. 2059 2060 { 2061 RunCleanupsScope Scope(*this); 2062 2063 // Evaluate the constructor and its arguments in a regular 2064 // partial-destroy cleanup. 2065 if (getLangOpts().Exceptions && 2066 !ctor->getParent()->hasTrivialDestructor()) { 2067 Destroyer *destroyer = destroyCXXObject; 2068 pushRegularPartialArrayCleanup(arrayBegin, cur, type, eltAlignment, 2069 *destroyer); 2070 } 2071 auto currAVS = AggValueSlot::forAddr( 2072 curAddr, type.getQualifiers(), AggValueSlot::IsDestructed, 2073 AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased, 2074 AggValueSlot::DoesNotOverlap, AggValueSlot::IsNotZeroed, 2075 NewPointerIsChecked ? AggValueSlot::IsSanitizerChecked 2076 : AggValueSlot::IsNotSanitizerChecked); 2077 EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/false, 2078 /*Delegating=*/false, currAVS, E); 2079 } 2080 2081 // Go to the next element. 2082 llvm::Value *next = Builder.CreateInBoundsGEP( 2083 elementType, cur, llvm::ConstantInt::get(SizeTy, 1), "arrayctor.next"); 2084 cur->addIncoming(next, Builder.GetInsertBlock()); 2085 2086 // Check whether that's the end of the loop. 2087 llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done"); 2088 llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont"); 2089 Builder.CreateCondBr(done, contBB, loopBB); 2090 2091 // Patch the earlier check to skip over the loop. 
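// (Editorial note: the conditional branch above was created with *both*
// successors pointing at the loop because this continuation block did not
// exist yet; retargeting successor 0, the taken edge of the isempty test,
// makes a zero element count bypass the loop entirely.)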
2092 if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB); 2093 2094 EmitBlock(contBB); 2095 } 2096 2097 void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF, 2098 Address addr, 2099 QualType type) { 2100 const RecordType *rtype = type->castAs<RecordType>(); 2101 const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl()); 2102 const CXXDestructorDecl *dtor = record->getDestructor(); 2103 assert(!dtor->isTrivial()); 2104 CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false, 2105 /*Delegating=*/false, addr, type); 2106 } 2107 2108 void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D, 2109 CXXCtorType Type, 2110 bool ForVirtualBase, 2111 bool Delegating, 2112 AggValueSlot ThisAVS, 2113 const CXXConstructExpr *E) { 2114 CallArgList Args; 2115 Address This = ThisAVS.getAddress(); 2116 LangAS SlotAS = ThisAVS.getQualifiers().getAddressSpace(); 2117 LangAS ThisAS = D->getThisObjectType().getAddressSpace(); 2118 llvm::Value *ThisPtr = This.getPointer(); 2119 2120 if (SlotAS != ThisAS) { 2121 unsigned TargetThisAS = getContext().getTargetAddressSpace(ThisAS); 2122 llvm::Type *NewType = 2123 llvm::PointerType::get(getLLVMContext(), TargetThisAS); 2124 ThisPtr = getTargetHooks().performAddrSpaceCast(*this, This.getPointer(), 2125 ThisAS, SlotAS, NewType); 2126 } 2127 2128 // Push the this ptr. 2129 Args.add(RValue::get(ThisPtr), D->getThisType()); 2130 2131 // If this is a trivial constructor, emit a memcpy now before we lose 2132 // the alignment information on the argument. 2133 // FIXME: It would be better to preserve alignment information into CallArg. 2134 if (isMemcpyEquivalentSpecialMember(D)) { 2135 assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor"); 2136 2137 const Expr *Arg = E->getArg(0); 2138 LValue Src = EmitLValue(Arg); 2139 QualType DestTy = getContext().getTypeDeclType(D->getParent()); 2140 LValue Dest = MakeAddrLValue(This, DestTy); 2141 EmitAggregateCopyCtor(Dest, Src, ThisAVS.mayOverlap()); 2142 return; 2143 } 2144 2145 // Add the rest of the user-supplied arguments. 2146 const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>(); 2147 EvaluationOrder Order = E->isListInitialization() 2148 ? EvaluationOrder::ForceLeftToRight 2149 : EvaluationOrder::Default; 2150 EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor(), 2151 /*ParamsToSkip*/ 0, Order); 2152 2153 EmitCXXConstructorCall(D, Type, ForVirtualBase, Delegating, This, Args, 2154 ThisAVS.mayOverlap(), E->getExprLoc(), 2155 ThisAVS.isSanitizerChecked()); 2156 } 2157 2158 static bool canEmitDelegateCallArgs(CodeGenFunction &CGF, 2159 const CXXConstructorDecl *Ctor, 2160 CXXCtorType Type, CallArgList &Args) { 2161 // We can't forward a variadic call. 2162 if (Ctor->isVariadic()) 2163 return false; 2164 2165 if (CGF.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) { 2166 // If the parameters are callee-cleanup, it's not safe to forward. 2167 for (auto *P : Ctor->parameters()) 2168 if (P->needsDestruction(CGF.getContext())) 2169 return false; 2170 2171 // Likewise if they're inalloca. 2172 const CGFunctionInfo &Info = 2173 CGF.CGM.getTypes().arrangeCXXConstructorCall(Args, Ctor, Type, 0, 0); 2174 if (Info.usesInAlloca()) 2175 return false; 2176 } 2177 2178 // Anything else should be OK. 
2179 return true; 2180 } 2181 2182 void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D, 2183 CXXCtorType Type, 2184 bool ForVirtualBase, 2185 bool Delegating, 2186 Address This, 2187 CallArgList &Args, 2188 AggValueSlot::Overlap_t Overlap, 2189 SourceLocation Loc, 2190 bool NewPointerIsChecked) { 2191 const CXXRecordDecl *ClassDecl = D->getParent(); 2192 2193 if (!NewPointerIsChecked) 2194 EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall, Loc, This.getPointer(), 2195 getContext().getRecordType(ClassDecl), CharUnits::Zero()); 2196 2197 if (D->isTrivial() && D->isDefaultConstructor()) { 2198 assert(Args.size() == 1 && "trivial default ctor with args"); 2199 return; 2200 } 2201 2202 // If this is a trivial constructor, just emit what's needed. If this is a 2203 // union copy constructor, we must emit a memcpy, because the AST does not 2204 // model that copy. 2205 if (isMemcpyEquivalentSpecialMember(D)) { 2206 assert(Args.size() == 2 && "unexpected argcount for trivial ctor"); 2207 2208 QualType SrcTy = D->getParamDecl(0)->getType().getNonReferenceType(); 2209 Address Src = Address(Args[1].getRValue(*this).getScalarVal(), ConvertTypeForMem(SrcTy), 2210 CGM.getNaturalTypeAlignment(SrcTy)); 2211 LValue SrcLVal = MakeAddrLValue(Src, SrcTy); 2212 QualType DestTy = getContext().getTypeDeclType(ClassDecl); 2213 LValue DestLVal = MakeAddrLValue(This, DestTy); 2214 EmitAggregateCopyCtor(DestLVal, SrcLVal, Overlap); 2215 return; 2216 } 2217 2218 bool PassPrototypeArgs = true; 2219 // Check whether we can actually emit the constructor before trying to do so. 2220 if (auto Inherited = D->getInheritedConstructor()) { 2221 PassPrototypeArgs = getTypes().inheritingCtorHasParams(Inherited, Type); 2222 if (PassPrototypeArgs && !canEmitDelegateCallArgs(*this, D, Type, Args)) { 2223 EmitInlinedInheritingCXXConstructorCall(D, Type, ForVirtualBase, 2224 Delegating, Args); 2225 return; 2226 } 2227 } 2228 2229 // Insert any ABI-specific implicit constructor arguments. 2230 CGCXXABI::AddedStructorArgCounts ExtraArgs = 2231 CGM.getCXXABI().addImplicitConstructorArgs(*this, D, Type, ForVirtualBase, 2232 Delegating, Args); 2233 2234 // Emit the call. 2235 llvm::Constant *CalleePtr = CGM.getAddrOfCXXStructor(GlobalDecl(D, Type)); 2236 const CGFunctionInfo &Info = CGM.getTypes().arrangeCXXConstructorCall( 2237 Args, D, Type, ExtraArgs.Prefix, ExtraArgs.Suffix, PassPrototypeArgs); 2238 CGCallee Callee = CGCallee::forDirect(CalleePtr, GlobalDecl(D, Type)); 2239 EmitCall(Info, Callee, ReturnValueSlot(), Args, nullptr, false, Loc); 2240 2241 // Generate vtable assumptions if we're constructing a complete object 2242 // with a vtable. We don't do this for base subobjects for two reasons: 2243 // first, it's incorrect for classes with virtual bases, and second, we're 2244 // about to overwrite the vptrs anyway. 2245 // We also have to make sure we can refer to the vtable: here we only do 2246 // so when it is safe to speculatively emit it. 2247 // FIXME: If the vtable is used by the ctor/dtor, or if it is external and we 2248 // are sure its definition is not hidden, then it is always safe to refer 2249 // to it. 2250 // FIXME: It looks like InstCombine is very inefficient at dealing with 2251 // assumes. Make assumption loads require -fstrict-vtable-pointers temporarily.
2252 if (CGM.getCodeGenOpts().OptimizationLevel > 0 && 2253 ClassDecl->isDynamicClass() && Type != Ctor_Base && 2254 CGM.getCXXABI().canSpeculativelyEmitVTable(ClassDecl) && 2255 CGM.getCodeGenOpts().StrictVTablePointers) 2256 EmitVTableAssumptionLoads(ClassDecl, This); 2257 } 2258 2259 void CodeGenFunction::EmitInheritedCXXConstructorCall( 2260 const CXXConstructorDecl *D, bool ForVirtualBase, Address This, 2261 bool InheritedFromVBase, const CXXInheritedCtorInitExpr *E) { 2262 CallArgList Args; 2263 CallArg ThisArg(RValue::get(This.getPointer()), D->getThisType()); 2264 2265 // Forward the parameters. 2266 if (InheritedFromVBase && 2267 CGM.getTarget().getCXXABI().hasConstructorVariants()) { 2268 // Nothing to do; this construction is not responsible for constructing 2269 // the base class containing the inherited constructor. 2270 // FIXME: Can we just pass undef's for the remaining arguments if we don't 2271 // have constructor variants? 2272 Args.push_back(ThisArg); 2273 } else if (!CXXInheritedCtorInitExprArgs.empty()) { 2274 // The inheriting constructor was inlined; just inject its arguments. 2275 assert(CXXInheritedCtorInitExprArgs.size() >= D->getNumParams() && 2276 "wrong number of parameters for inherited constructor call"); 2277 Args = CXXInheritedCtorInitExprArgs; 2278 Args[0] = ThisArg; 2279 } else { 2280 // The inheriting constructor was not inlined. Emit delegating arguments. 2281 Args.push_back(ThisArg); 2282 const auto *OuterCtor = cast<CXXConstructorDecl>(CurCodeDecl); 2283 assert(OuterCtor->getNumParams() == D->getNumParams()); 2284 assert(!OuterCtor->isVariadic() && "should have been inlined"); 2285 2286 for (const auto *Param : OuterCtor->parameters()) { 2287 assert(getContext().hasSameUnqualifiedType( 2288 OuterCtor->getParamDecl(Param->getFunctionScopeIndex())->getType(), 2289 Param->getType())); 2290 EmitDelegateCallArg(Args, Param, E->getLocation()); 2291 2292 // Forward __attribute__(pass_object_size). 2293 if (Param->hasAttr<PassObjectSizeAttr>()) { 2294 auto *POSParam = SizeArguments[Param]; 2295 assert(POSParam && "missing pass_object_size value for forwarding"); 2296 EmitDelegateCallArg(Args, POSParam, E->getLocation()); 2297 } 2298 } 2299 } 2300 2301 EmitCXXConstructorCall(D, Ctor_Base, ForVirtualBase, /*Delegating*/false, 2302 This, Args, AggValueSlot::MayOverlap, 2303 E->getLocation(), /*NewPointerIsChecked*/true); 2304 } 2305 2306 void CodeGenFunction::EmitInlinedInheritingCXXConstructorCall( 2307 const CXXConstructorDecl *Ctor, CXXCtorType CtorType, bool ForVirtualBase, 2308 bool Delegating, CallArgList &Args) { 2309 GlobalDecl GD(Ctor, CtorType); 2310 InlinedInheritingConstructorScope Scope(*this, GD); 2311 ApplyInlineDebugLocation DebugScope(*this, GD); 2312 RunCleanupsScope RunCleanups(*this); 2313 2314 // Save the arguments to be passed to the inherited constructor. 2315 CXXInheritedCtorInitExprArgs = Args; 2316 2317 FunctionArgList Params; 2318 QualType RetType = BuildFunctionArgList(CurGD, Params); 2319 FnRetTy = RetType; 2320 2321 // Insert any ABI-specific implicit constructor arguments. 2322 CGM.getCXXABI().addImplicitConstructorArgs(*this, Ctor, CtorType, 2323 ForVirtualBase, Delegating, Args); 2324 2325 // Emit a simplified prolog. We only need to emit the implicit params. 
2326 assert(Args.size() >= Params.size() && "too few arguments for call"); 2327 for (unsigned I = 0, N = Args.size(); I != N; ++I) { 2328 if (I < Params.size() && isa<ImplicitParamDecl>(Params[I])) { 2329 const RValue &RV = Args[I].getRValue(*this); 2330 assert(!RV.isComplex() && "complex indirect params not supported"); 2331 ParamValue Val = RV.isScalar() 2332 ? ParamValue::forDirect(RV.getScalarVal()) 2333 : ParamValue::forIndirect(RV.getAggregateAddress()); 2334 EmitParmDecl(*Params[I], Val, I + 1); 2335 } 2336 } 2337 2338 // Create a return value slot if the ABI implementation wants one. 2339 // FIXME: This is dumb, we should ask the ABI not to try to set the return 2340 // value instead. 2341 if (!RetType->isVoidType()) 2342 ReturnValue = CreateIRTemp(RetType, "retval.inhctor"); 2343 2344 CGM.getCXXABI().EmitInstanceFunctionProlog(*this); 2345 CXXThisValue = CXXABIThisValue; 2346 2347 // Directly emit the constructor initializers. 2348 EmitCtorPrologue(Ctor, CtorType, Params); 2349 } 2350 2351 void CodeGenFunction::EmitVTableAssumptionLoad(const VPtr &Vptr, Address This) { 2352 llvm::Value *VTableGlobal = 2353 CGM.getCXXABI().getVTableAddressPoint(Vptr.Base, Vptr.VTableClass); 2354 if (!VTableGlobal) 2355 return; 2356 2357 // We can just use the base offset in the complete class. 2358 CharUnits NonVirtualOffset = Vptr.Base.getBaseOffset(); 2359 2360 if (!NonVirtualOffset.isZero()) 2361 This = 2362 ApplyNonVirtualAndVirtualOffset(*this, This, NonVirtualOffset, nullptr, 2363 Vptr.VTableClass, Vptr.NearestVBase); 2364 2365 llvm::Value *VPtrValue = 2366 GetVTablePtr(This, VTableGlobal->getType(), Vptr.VTableClass); 2367 llvm::Value *Cmp = 2368 Builder.CreateICmpEQ(VPtrValue, VTableGlobal, "cmp.vtables"); 2369 Builder.CreateAssumption(Cmp); 2370 } 2371 2372 void CodeGenFunction::EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl, 2373 Address This) { 2374 if (CGM.getCXXABI().doStructorsInitializeVPtrs(ClassDecl)) 2375 for (const VPtr &Vptr : getVTablePointers(ClassDecl)) 2376 EmitVTableAssumptionLoad(Vptr, This); 2377 } 2378 2379 void 2380 CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D, 2381 Address This, Address Src, 2382 const CXXConstructExpr *E) { 2383 const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>(); 2384 2385 CallArgList Args; 2386 2387 // Push the this ptr. 2388 Args.add(RValue::get(This.getPointer()), D->getThisType()); 2389 2390 // Push the src ptr. 2391 QualType QT = *(FPT->param_type_begin()); 2392 llvm::Type *t = CGM.getTypes().ConvertType(QT); 2393 llvm::Value *SrcVal = Builder.CreateBitCast(Src.getPointer(), t); 2394 Args.add(RValue::get(SrcVal), QT); 2395 2396 // Skip over first argument (Src). 
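// (Editorial note: the source object was already pushed explicitly above as
// SrcVal, so drop_begin skips it here and only the remaining arguments,
// e.g. trailing defaulted parameters of a copy constructor, are emitted.)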
2397 EmitCallArgs(Args, FPT, drop_begin(E->arguments(), 1), E->getConstructor(), 2398 /*ParamsToSkip*/ 1); 2399 2400 EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase*/false, 2401 /*Delegating*/false, This, Args, 2402 AggValueSlot::MayOverlap, E->getExprLoc(), 2403 /*NewPointerIsChecked*/false); 2404 } 2405 2406 void 2407 CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor, 2408 CXXCtorType CtorType, 2409 const FunctionArgList &Args, 2410 SourceLocation Loc) { 2411 CallArgList DelegateArgs; 2412 2413 FunctionArgList::const_iterator I = Args.begin(), E = Args.end(); 2414 assert(I != E && "no parameters to constructor"); 2415 2416 // this 2417 Address This = LoadCXXThisAddress(); 2418 DelegateArgs.add(RValue::get(This.getPointer()), (*I)->getType()); 2419 ++I; 2420 2421 // FIXME: The location of the VTT parameter in the parameter list is 2422 // specific to the Itanium ABI and shouldn't be hardcoded here. 2423 if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) { 2424 assert(I != E && "cannot skip vtt parameter, already done with args"); 2425 assert((*I)->getType()->isPointerType() && 2426 "skipping parameter not of vtt type"); 2427 ++I; 2428 } 2429 2430 // Explicit arguments. 2431 for (; I != E; ++I) { 2432 const VarDecl *param = *I; 2433 // FIXME: per-argument source location 2434 EmitDelegateCallArg(DelegateArgs, param, Loc); 2435 } 2436 2437 EmitCXXConstructorCall(Ctor, CtorType, /*ForVirtualBase=*/false, 2438 /*Delegating=*/true, This, DelegateArgs, 2439 AggValueSlot::MayOverlap, Loc, 2440 /*NewPointerIsChecked=*/true); 2441 } 2442 2443 namespace { 2444 struct CallDelegatingCtorDtor final : EHScopeStack::Cleanup { 2445 const CXXDestructorDecl *Dtor; 2446 Address Addr; 2447 CXXDtorType Type; 2448 2449 CallDelegatingCtorDtor(const CXXDestructorDecl *D, Address Addr, 2450 CXXDtorType Type) 2451 : Dtor(D), Addr(Addr), Type(Type) {} 2452 2453 void Emit(CodeGenFunction &CGF, Flags flags) override { 2454 // We are calling the destructor from within the constructor. 2455 // Therefore, "this" should have the expected type. 2456 QualType ThisTy = Dtor->getThisObjectType(); 2457 CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false, 2458 /*Delegating=*/true, Addr, ThisTy); 2459 } 2460 }; 2461 } // end anonymous namespace 2462 2463 void 2464 CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor, 2465 const FunctionArgList &Args) { 2466 assert(Ctor->isDelegatingConstructor()); 2467 2468 Address ThisPtr = LoadCXXThisAddress(); 2469 2470 AggValueSlot AggSlot = 2471 AggValueSlot::forAddr(ThisPtr, Qualifiers(), 2472 AggValueSlot::IsDestructed, 2473 AggValueSlot::DoesNotNeedGCBarriers, 2474 AggValueSlot::IsNotAliased, 2475 AggValueSlot::MayOverlap, 2476 AggValueSlot::IsNotZeroed, 2477 // Checks are made by the code that calls constructor. 2478 AggValueSlot::IsSanitizerChecked); 2479 2480 EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot); 2481 2482 const CXXRecordDecl *ClassDecl = Ctor->getParent(); 2483 if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) { 2484 CXXDtorType Type = 2485 CurGD.getCtorType() == Ctor_Complete ? 
Dtor_Complete : Dtor_Base; 2486 2487 EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup, 2488 ClassDecl->getDestructor(), 2489 ThisPtr, Type); 2490 } 2491 } 2492 2493 void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD, 2494 CXXDtorType Type, 2495 bool ForVirtualBase, 2496 bool Delegating, Address This, 2497 QualType ThisTy) { 2498 CGM.getCXXABI().EmitDestructorCall(*this, DD, Type, ForVirtualBase, 2499 Delegating, This, ThisTy); 2500 } 2501 2502 namespace { 2503 struct CallLocalDtor final : EHScopeStack::Cleanup { 2504 const CXXDestructorDecl *Dtor; 2505 Address Addr; 2506 QualType Ty; 2507 2508 CallLocalDtor(const CXXDestructorDecl *D, Address Addr, QualType Ty) 2509 : Dtor(D), Addr(Addr), Ty(Ty) {} 2510 2511 void Emit(CodeGenFunction &CGF, Flags flags) override { 2512 CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, 2513 /*ForVirtualBase=*/false, 2514 /*Delegating=*/false, Addr, Ty); 2515 } 2516 }; 2517 } // end anonymous namespace 2518 2519 void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D, 2520 QualType T, Address Addr) { 2521 EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr, T); 2522 } 2523 2524 void CodeGenFunction::PushDestructorCleanup(QualType T, Address Addr) { 2525 CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl(); 2526 if (!ClassDecl) return; 2527 if (ClassDecl->hasTrivialDestructor()) return; 2528 2529 const CXXDestructorDecl *D = ClassDecl->getDestructor(); 2530 assert(D && D->isUsed() && "destructor not marked as used!"); 2531 PushDestructorCleanup(D, T, Addr); 2532 } 2533 2534 void CodeGenFunction::InitializeVTablePointer(const VPtr &Vptr) { 2535 // Compute the address point. 2536 llvm::Value *VTableAddressPoint = 2537 CGM.getCXXABI().getVTableAddressPointInStructor( 2538 *this, Vptr.VTableClass, Vptr.Base, Vptr.NearestVBase); 2539 2540 if (!VTableAddressPoint) 2541 return; 2542 2543 // Compute where to store the address point. 2544 llvm::Value *VirtualOffset = nullptr; 2545 CharUnits NonVirtualOffset = CharUnits::Zero(); 2546 2547 if (CGM.getCXXABI().isVirtualOffsetNeededForVTableField(*this, Vptr)) { 2548 // We need to use the virtual base offset offset because the virtual base 2549 // might have a different offset in the most derived class. 2550 2551 VirtualOffset = CGM.getCXXABI().GetVirtualBaseClassOffset( 2552 *this, LoadCXXThisAddress(), Vptr.VTableClass, Vptr.NearestVBase); 2553 NonVirtualOffset = Vptr.OffsetFromNearestVBase; 2554 } else { 2555 // We can just use the base offset in the complete class. 2556 NonVirtualOffset = Vptr.Base.getBaseOffset(); 2557 } 2558 2559 // Apply the offsets. 2560 Address VTableField = LoadCXXThisAddress(); 2561 if (!NonVirtualOffset.isZero() || VirtualOffset) 2562 VTableField = ApplyNonVirtualAndVirtualOffset( 2563 *this, VTableField, NonVirtualOffset, VirtualOffset, Vptr.VTableClass, 2564 Vptr.NearestVBase); 2565 2566 // Finally, store the address point. Use the same LLVM types as the field to 2567 // support optimization. 2568 unsigned GlobalsAS = CGM.getDataLayout().getDefaultGlobalsAddressSpace(); 2569 llvm::Type *PtrTy = llvm::PointerType::get(CGM.getLLVMContext(), GlobalsAS); 2570 // vtable field is derived from `this` pointer, therefore they should be in 2571 // the same addr space. Note that this might not be LLVM address space 0. 
2572 VTableField = VTableField.withElementType(PtrTy); 2573 2574 llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField); 2575 TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(PtrTy); 2576 CGM.DecorateInstructionWithTBAA(Store, TBAAInfo); 2577 if (CGM.getCodeGenOpts().OptimizationLevel > 0 && 2578 CGM.getCodeGenOpts().StrictVTablePointers) 2579 CGM.DecorateInstructionWithInvariantGroup(Store, Vptr.VTableClass); 2580 } 2581 2582 CodeGenFunction::VPtrsVector 2583 CodeGenFunction::getVTablePointers(const CXXRecordDecl *VTableClass) { 2584 CodeGenFunction::VPtrsVector VPtrsResult; 2585 VisitedVirtualBasesSetTy VBases; 2586 getVTablePointers(BaseSubobject(VTableClass, CharUnits::Zero()), 2587 /*NearestVBase=*/nullptr, 2588 /*OffsetFromNearestVBase=*/CharUnits::Zero(), 2589 /*BaseIsNonVirtualPrimaryBase=*/false, VTableClass, VBases, 2590 VPtrsResult); 2591 return VPtrsResult; 2592 } 2593 2594 void CodeGenFunction::getVTablePointers(BaseSubobject Base, 2595 const CXXRecordDecl *NearestVBase, 2596 CharUnits OffsetFromNearestVBase, 2597 bool BaseIsNonVirtualPrimaryBase, 2598 const CXXRecordDecl *VTableClass, 2599 VisitedVirtualBasesSetTy &VBases, 2600 VPtrsVector &Vptrs) { 2601 // If this base is a non-virtual primary base the address point has already 2602 // been set. 2603 if (!BaseIsNonVirtualPrimaryBase) { 2604 // Initialize the vtable pointer for this base. 2605 VPtr Vptr = {Base, NearestVBase, OffsetFromNearestVBase, VTableClass}; 2606 Vptrs.push_back(Vptr); 2607 } 2608 2609 const CXXRecordDecl *RD = Base.getBase(); 2610 2611 // Traverse bases. 2612 for (const auto &I : RD->bases()) { 2613 auto *BaseDecl = 2614 cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl()); 2615 2616 // Ignore classes without a vtable. 2617 if (!BaseDecl->isDynamicClass()) 2618 continue; 2619 2620 CharUnits BaseOffset; 2621 CharUnits BaseOffsetFromNearestVBase; 2622 bool BaseDeclIsNonVirtualPrimaryBase; 2623 2624 if (I.isVirtual()) { 2625 // Check if we've visited this virtual base before. 2626 if (!VBases.insert(BaseDecl).second) 2627 continue; 2628 2629 const ASTRecordLayout &Layout = 2630 getContext().getASTRecordLayout(VTableClass); 2631 2632 BaseOffset = Layout.getVBaseClassOffset(BaseDecl); 2633 BaseOffsetFromNearestVBase = CharUnits::Zero(); 2634 BaseDeclIsNonVirtualPrimaryBase = false; 2635 } else { 2636 const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD); 2637 2638 BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl); 2639 BaseOffsetFromNearestVBase = 2640 OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl); 2641 BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl; 2642 } 2643 2644 getVTablePointers( 2645 BaseSubobject(BaseDecl, BaseOffset), 2646 I.isVirtual() ? BaseDecl : NearestVBase, BaseOffsetFromNearestVBase, 2647 BaseDeclIsNonVirtualPrimaryBase, VTableClass, VBases, Vptrs); 2648 } 2649 } 2650 2651 void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) { 2652 // Ignore classes without a vtable. 2653 if (!RD->isDynamicClass()) 2654 return; 2655 2656 // Initialize the vtable pointers for this class and all of its bases. 
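// (Editorial illustration, not from the source: given
//
//   struct A { virtual void f(); };
//   struct B : A { virtual void g(); };
//
// constructing a B first runs A's constructor, which installs A's vtable
// address point; the stores emitted below then overwrite it with B's, so
// the vptr always matches the subobject whose constructor body is running.)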
2657 if (CGM.getCXXABI().doStructorsInitializeVPtrs(RD)) 2658 for (const VPtr &Vptr : getVTablePointers(RD)) 2659 InitializeVTablePointer(Vptr); 2660 2661 if (RD->getNumVBases()) 2662 CGM.getCXXABI().initializeHiddenVirtualInheritanceMembers(*this, RD); 2663 } 2664 2665 llvm::Value *CodeGenFunction::GetVTablePtr(Address This, 2666 llvm::Type *VTableTy, 2667 const CXXRecordDecl *RD) { 2668 Address VTablePtrSrc = This.withElementType(VTableTy); 2669 llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable"); 2670 TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(VTableTy); 2671 CGM.DecorateInstructionWithTBAA(VTable, TBAAInfo); 2672 2673 if (CGM.getCodeGenOpts().OptimizationLevel > 0 && 2674 CGM.getCodeGenOpts().StrictVTablePointers) 2675 CGM.DecorateInstructionWithInvariantGroup(VTable, RD); 2676 2677 return VTable; 2678 } 2679 2680 // If a class has a single non-virtual base, adds no fields, and neither 2681 // introduces nor overrides virtual member functions, it will have the same layout as its base. 2682 // This function returns the least derived such class. 2683 // 2684 // Casting an instance of a base class to such a derived class is technically 2685 // undefined behavior, but it is a relatively common hack for introducing member 2686 // functions on class instances with specific properties (e.g. llvm::Operator) 2687 // that works under most compilers and should not have security implications, so 2688 // we allow it by default. It can be disabled with -fsanitize=cfi-cast-strict. 2689 static const CXXRecordDecl * 2690 LeastDerivedClassWithSameLayout(const CXXRecordDecl *RD) { 2691 if (!RD->field_empty()) 2692 return RD; 2693 2694 if (RD->getNumVBases() != 0) 2695 return RD; 2696 2697 if (RD->getNumBases() != 1) 2698 return RD; 2699 2700 for (const CXXMethodDecl *MD : RD->methods()) { 2701 if (MD->isVirtual()) { 2702 // Virtual member functions are only ok if they are implicit destructors 2703 // because the implicit destructor will have the same semantics as the 2704 // base class's destructor if no fields are added. 2705 if (isa<CXXDestructorDecl>(MD) && MD->isImplicit()) 2706 continue; 2707 return RD; 2708 } 2709 } 2710 2711 return LeastDerivedClassWithSameLayout( 2712 RD->bases_begin()->getType()->getAsCXXRecordDecl()); 2713 } 2714 2715 void CodeGenFunction::EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD, 2716 llvm::Value *VTable, 2717 SourceLocation Loc) { 2718 if (SanOpts.has(SanitizerKind::CFIVCall)) 2719 EmitVTablePtrCheckForCall(RD, VTable, CodeGenFunction::CFITCK_VCall, Loc); 2720 else if (CGM.getCodeGenOpts().WholeProgramVTables && 2721 // Don't insert type test assumes if we are forcing public 2722 // visibility. 2723 !CGM.AlwaysHasLTOVisibilityPublic(RD)) { 2724 QualType Ty = QualType(RD->getTypeForDecl(), 0); 2725 llvm::Metadata *MD = CGM.CreateMetadataIdentifierForType(Ty); 2726 llvm::Value *TypeId = 2727 llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD); 2728 2729 // If we already know that the call has hidden LTO visibility, emit 2730 // @llvm.type.test(). Otherwise emit @llvm.public.type.test(), which WPD 2731 // will convert to @llvm.type.test() if we assert at link time that we have 2732 // whole program visibility. 2733 llvm::Intrinsic::ID IID = CGM.HasHiddenLTOVisibility(RD) 2734 ?
llvm::Intrinsic::type_test 2735 : llvm::Intrinsic::public_type_test; 2736 llvm::Value *TypeTest = 2737 Builder.CreateCall(CGM.getIntrinsic(IID), {VTable, TypeId}); 2738 Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::assume), TypeTest); 2739 } 2740 } 2741 2742 void CodeGenFunction::EmitVTablePtrCheckForCall(const CXXRecordDecl *RD, 2743 llvm::Value *VTable, 2744 CFITypeCheckKind TCK, 2745 SourceLocation Loc) { 2746 if (!SanOpts.has(SanitizerKind::CFICastStrict)) 2747 RD = LeastDerivedClassWithSameLayout(RD); 2748 2749 EmitVTablePtrCheck(RD, VTable, TCK, Loc); 2750 } 2751 2752 void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T, Address Derived, 2753 bool MayBeNull, 2754 CFITypeCheckKind TCK, 2755 SourceLocation Loc) { 2756 if (!getLangOpts().CPlusPlus) 2757 return; 2758 2759 auto *ClassTy = T->getAs<RecordType>(); 2760 if (!ClassTy) 2761 return; 2762 2763 const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(ClassTy->getDecl()); 2764 2765 if (!ClassDecl->isCompleteDefinition() || !ClassDecl->isDynamicClass()) 2766 return; 2767 2768 if (!SanOpts.has(SanitizerKind::CFICastStrict)) 2769 ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl); 2770 2771 llvm::BasicBlock *ContBlock = nullptr; 2772 2773 if (MayBeNull) { 2774 llvm::Value *DerivedNotNull = 2775 Builder.CreateIsNotNull(Derived.getPointer(), "cast.nonnull"); 2776 2777 llvm::BasicBlock *CheckBlock = createBasicBlock("cast.check"); 2778 ContBlock = createBasicBlock("cast.cont"); 2779 2780 Builder.CreateCondBr(DerivedNotNull, CheckBlock, ContBlock); 2781 2782 EmitBlock(CheckBlock); 2783 } 2784 2785 llvm::Value *VTable; 2786 std::tie(VTable, ClassDecl) = 2787 CGM.getCXXABI().LoadVTablePtr(*this, Derived, ClassDecl); 2788 2789 EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc); 2790 2791 if (MayBeNull) { 2792 Builder.CreateBr(ContBlock); 2793 EmitBlock(ContBlock); 2794 } 2795 } 2796 2797 void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD, 2798 llvm::Value *VTable, 2799 CFITypeCheckKind TCK, 2800 SourceLocation Loc) { 2801 if (!CGM.getCodeGenOpts().SanitizeCfiCrossDso && 2802 !CGM.HasHiddenLTOVisibility(RD)) 2803 return; 2804 2805 SanitizerMask M; 2806 llvm::SanitizerStatKind SSK; 2807 switch (TCK) { 2808 case CFITCK_VCall: 2809 M = SanitizerKind::CFIVCall; 2810 SSK = llvm::SanStat_CFI_VCall; 2811 break; 2812 case CFITCK_NVCall: 2813 M = SanitizerKind::CFINVCall; 2814 SSK = llvm::SanStat_CFI_NVCall; 2815 break; 2816 case CFITCK_DerivedCast: 2817 M = SanitizerKind::CFIDerivedCast; 2818 SSK = llvm::SanStat_CFI_DerivedCast; 2819 break; 2820 case CFITCK_UnrelatedCast: 2821 M = SanitizerKind::CFIUnrelatedCast; 2822 SSK = llvm::SanStat_CFI_UnrelatedCast; 2823 break; 2824 case CFITCK_ICall: 2825 case CFITCK_NVMFCall: 2826 case CFITCK_VMFCall: 2827 llvm_unreachable("unexpected sanitizer kind"); 2828 } 2829 2830 std::string TypeName = RD->getQualifiedNameAsString(); 2831 if (getContext().getNoSanitizeList().containsType(M, TypeName)) 2832 return; 2833 2834 SanitizerScope SanScope(this); 2835 EmitSanitizerStatReport(SSK); 2836 2837 llvm::Metadata *MD = 2838 CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0)); 2839 llvm::Value *TypeId = llvm::MetadataAsValue::get(getLLVMContext(), MD); 2840 2841 llvm::Value *TypeTest = Builder.CreateCall( 2842 CGM.getIntrinsic(llvm::Intrinsic::type_test), {VTable, TypeId}); 2843 2844 llvm::Constant *StaticData[] = { 2845 llvm::ConstantInt::get(Int8Ty, TCK), 2846 EmitCheckSourceLocation(Loc), 2847 EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)), 2848 }; 2849 2850 
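// (Editorial note: three outcomes follow, sketched under the usual CFI
// configuration: a cross-DSO slow path when -fsanitize-cfi-cross-dso
// supplies a numeric type id, a bare trap when this check is in
// -fsanitize-trap, and otherwise a full diagnostic via CFICheckFail.)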
auto CrossDsoTypeId = CGM.CreateCrossDsoCfiTypeId(MD); 2851 if (CGM.getCodeGenOpts().SanitizeCfiCrossDso && CrossDsoTypeId) { 2852 EmitCfiSlowPathCheck(M, TypeTest, CrossDsoTypeId, VTable, StaticData); 2853 return; 2854 } 2855 2856 if (CGM.getCodeGenOpts().SanitizeTrap.has(M)) { 2857 EmitTrapCheck(TypeTest, SanitizerHandler::CFICheckFail); 2858 return; 2859 } 2860 2861 llvm::Value *AllVtables = llvm::MetadataAsValue::get( 2862 CGM.getLLVMContext(), 2863 llvm::MDString::get(CGM.getLLVMContext(), "all-vtables")); 2864 llvm::Value *ValidVtable = Builder.CreateCall( 2865 CGM.getIntrinsic(llvm::Intrinsic::type_test), {VTable, AllVtables}); 2866 EmitCheck(std::make_pair(TypeTest, M), SanitizerHandler::CFICheckFail, 2867 StaticData, {VTable, ValidVtable}); 2868 } 2869 2870 bool CodeGenFunction::ShouldEmitVTableTypeCheckedLoad(const CXXRecordDecl *RD) { 2871 if (!CGM.getCodeGenOpts().WholeProgramVTables || 2872 !CGM.HasHiddenLTOVisibility(RD)) 2873 return false; 2874 2875 if (CGM.getCodeGenOpts().VirtualFunctionElimination) 2876 return true; 2877 2878 if (!SanOpts.has(SanitizerKind::CFIVCall) || 2879 !CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIVCall)) 2880 return false; 2881 2882 std::string TypeName = RD->getQualifiedNameAsString(); 2883 return !getContext().getNoSanitizeList().containsType(SanitizerKind::CFIVCall, 2884 TypeName); 2885 } 2886 2887 llvm::Value *CodeGenFunction::EmitVTableTypeCheckedLoad( 2888 const CXXRecordDecl *RD, llvm::Value *VTable, llvm::Type *VTableTy, 2889 uint64_t VTableByteOffset) { 2890 SanitizerScope SanScope(this); 2891 2892 EmitSanitizerStatReport(llvm::SanStat_CFI_VCall); 2893 2894 llvm::Metadata *MD = 2895 CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0)); 2896 llvm::Value *TypeId = llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD); 2897 2898 llvm::Value *CheckedLoad = Builder.CreateCall( 2899 CGM.getIntrinsic(llvm::Intrinsic::type_checked_load), 2900 {VTable, llvm::ConstantInt::get(Int32Ty, VTableByteOffset), TypeId}); 2901 llvm::Value *CheckResult = Builder.CreateExtractValue(CheckedLoad, 1); 2902 2903 std::string TypeName = RD->getQualifiedNameAsString(); 2904 if (SanOpts.has(SanitizerKind::CFIVCall) && 2905 !getContext().getNoSanitizeList().containsType(SanitizerKind::CFIVCall, 2906 TypeName)) { 2907 EmitCheck(std::make_pair(CheckResult, SanitizerKind::CFIVCall), 2908 SanitizerHandler::CFICheckFail, {}, {}); 2909 } 2910 2911 return Builder.CreateBitCast(Builder.CreateExtractValue(CheckedLoad, 0), 2912 VTableTy); 2913 } 2914 2915 void CodeGenFunction::EmitForwardingCallToLambda( 2916 const CXXMethodDecl *callOperator, CallArgList &callArgs, 2917 const CGFunctionInfo *calleeFnInfo, llvm::Constant *calleePtr) { 2918 // Get the address of the call operator. 2919 if (!calleeFnInfo) 2920 calleeFnInfo = &CGM.getTypes().arrangeCXXMethodDeclaration(callOperator); 2921 2922 if (!calleePtr) 2923 calleePtr = 2924 CGM.GetAddrOfFunction(GlobalDecl(callOperator), 2925 CGM.getTypes().GetFunctionType(*calleeFnInfo)); 2926 2927 // Prepare the return slot. 
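// (Editorial note: a slot is forwarded only when the callee returns an
// aggregate indirectly; scalar results are instead returned through the
// copy-out path at the bottom of this function, and void results simply
// branch to the return block.)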
2928 const FunctionProtoType *FPT = 2929 callOperator->getType()->castAs<FunctionProtoType>(); 2930 QualType resultType = FPT->getReturnType(); 2931 ReturnValueSlot returnSlot; 2932 if (!resultType->isVoidType() && 2933 calleeFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect && 2934 !hasScalarEvaluationKind(calleeFnInfo->getReturnType())) 2935 returnSlot = 2936 ReturnValueSlot(ReturnValue, resultType.isVolatileQualified(), 2937 /*IsUnused=*/false, /*IsExternallyDestructed=*/true); 2938 2939 // We don't need to separately arrange the call arguments because 2940 // the call can't be variadic anyway --- it's impossible to forward 2941 // variadic arguments. 2942 2943 // Now emit our call. 2944 auto callee = CGCallee::forDirect(calleePtr, GlobalDecl(callOperator)); 2945 RValue RV = EmitCall(*calleeFnInfo, callee, returnSlot, callArgs); 2946 2947 // If necessary, copy the returned value into the slot. 2948 if (!resultType->isVoidType() && returnSlot.isNull()) { 2949 if (getLangOpts().ObjCAutoRefCount && resultType->isObjCRetainableType()) { 2950 RV = RValue::get(EmitARCRetainAutoreleasedReturnValue(RV.getScalarVal())); 2951 } 2952 EmitReturnOfRValue(RV, resultType); 2953 } else 2954 EmitBranchThroughCleanup(ReturnBlock); 2955 } 2956 2957 void CodeGenFunction::EmitLambdaBlockInvokeBody() { 2958 const BlockDecl *BD = BlockInfo->getBlockDecl(); 2959 const VarDecl *variable = BD->capture_begin()->getVariable(); 2960 const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl(); 2961 const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator(); 2962 2963 if (CallOp->isVariadic()) { 2964 // FIXME: Making this work correctly is nasty because it requires either 2965 // cloning the body of the call operator or making the call operator 2966 // forward. 2967 CGM.ErrorUnsupported(CurCodeDecl, "lambda conversion to variadic function"); 2968 return; 2969 } 2970 2971 // Start building arguments for forwarding call 2972 CallArgList CallArgs; 2973 2974 QualType ThisType = getContext().getPointerType(getContext().getRecordType(Lambda)); 2975 Address ThisPtr = GetAddrOfBlockDecl(variable); 2976 CallArgs.add(RValue::get(ThisPtr.getPointer()), ThisType); 2977 2978 // Add the rest of the parameters. 2979 for (auto *param : BD->parameters()) 2980 EmitDelegateCallArg(CallArgs, param, param->getBeginLoc()); 2981 2982 assert(!Lambda->isGenericLambda() && 2983 "generic lambda interconversion to block not implemented"); 2984 EmitForwardingCallToLambda(CallOp, CallArgs); 2985 } 2986 2987 void CodeGenFunction::EmitLambdaStaticInvokeBody(const CXXMethodDecl *MD) { 2988 if (MD->isVariadic()) { 2989 // FIXME: Making this work correctly is nasty because it requires either 2990 // cloning the body of the call operator or making the call operator 2991 // forward. 2992 CGM.ErrorUnsupported(MD, "lambda conversion to variadic function"); 2993 return; 2994 } 2995 2996 const CXXRecordDecl *Lambda = MD->getParent(); 2997 2998 // Start building arguments for forwarding call 2999 CallArgList CallArgs; 3000 3001 QualType LambdaType = getContext().getRecordType(Lambda); 3002 QualType ThisType = getContext().getPointerType(LambdaType); 3003 Address ThisPtr = CreateMemTemp(LambdaType, "unused.capture"); 3004 CallArgs.add(RValue::get(ThisPtr.getPointer()), ThisType); 3005 3006 EmitLambdaDelegatingInvokeBody(MD, CallArgs); 3007 } 3008 3009 void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD, 3010 CallArgList &CallArgs) { 3011 // Add the rest of the forwarded parameters. 
  for (auto *Param : MD->parameters())
    EmitDelegateCallArg(CallArgs, Param, Param->getBeginLoc());

  const CXXRecordDecl *Lambda = MD->getParent();
  const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();
  // For a generic lambda, find the corresponding call operator specialization
  // to which the call to the static-invoker shall be forwarded.
  if (Lambda->isGenericLambda()) {
    assert(MD->isFunctionTemplateSpecialization());
    const TemplateArgumentList *TAL = MD->getTemplateSpecializationArgs();
    FunctionTemplateDecl *CallOpTemplate =
        CallOp->getDescribedFunctionTemplate();
    void *InsertPos = nullptr;
    FunctionDecl *CorrespondingCallOpSpecialization =
        CallOpTemplate->findSpecialization(TAL->asArray(), InsertPos);
    assert(CorrespondingCallOpSpecialization);
    CallOp = cast<CXXMethodDecl>(CorrespondingCallOpSpecialization);
  }

  // Special lambda forwarding when there are inalloca parameters.
  if (hasInAllocaArg(MD)) {
    const CGFunctionInfo *ImplFnInfo = nullptr;
    llvm::Function *ImplFn = nullptr;
    EmitLambdaInAllocaImplFn(CallOp, &ImplFnInfo, &ImplFn);

    EmitForwardingCallToLambda(CallOp, CallArgs, ImplFnInfo, ImplFn);
    return;
  }

  EmitForwardingCallToLambda(CallOp, CallArgs);
}

void CodeGenFunction::EmitLambdaInAllocaCallOpBody(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
    return;
  }

  // Forward %this argument.
  CallArgList CallArgs;
  QualType LambdaType = getContext().getRecordType(MD->getParent());
  QualType ThisType = getContext().getPointerType(LambdaType);
  llvm::Value *ThisArg = CurFn->getArg(0);
  CallArgs.add(RValue::get(ThisArg), ThisType);

  EmitLambdaDelegatingInvokeBody(MD, CallArgs);
}

void CodeGenFunction::EmitLambdaInAllocaImplFn(
    const CXXMethodDecl *CallOp, const CGFunctionInfo **ImplFnInfo,
    llvm::Function **ImplFn) {
  const CGFunctionInfo &FnInfo =
      CGM.getTypes().arrangeCXXMethodDeclaration(CallOp);
  llvm::Function *CallOpFn =
      cast<llvm::Function>(CGM.GetAddrOfFunction(GlobalDecl(CallOp)));

  // Emit function containing the original call op body. __invoke will delegate
  // to this function.
  SmallVector<CanQualType, 4> ArgTypes;
  for (auto I = FnInfo.arg_begin(); I != FnInfo.arg_end(); ++I)
    ArgTypes.push_back(I->type);
  *ImplFnInfo = &CGM.getTypes().arrangeLLVMFunctionInfo(
      FnInfo.getReturnType(), FnInfoOpts::IsDelegateCall, ArgTypes,
      FnInfo.getExtInfo(), {}, FnInfo.getRequiredArgs());

  // Create mangled name as if this was a method named __impl. If for some
  // reason the name doesn't look as expected then just tack __impl to the
  // front.
  // TODO: Use the name mangler to produce the right name instead of using
  // string replacement.
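  // (Illustrative note, hypothetical mangled name) a call operator such as
  //   ??R<lambda_0>@?0??f@@YAXXZ@QEBA?AUA@@XZ
  // is rewritten below to
  //   ?__impl@<lambda_0>@?0??f@@YAXXZ@QEBA?AUA@@XZ
  // i.e. everything before "<lambda" is replaced with "?__impl@".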
  StringRef CallOpName = CallOpFn->getName();
  std::string ImplName;
  // Note: use find(), not find_first_of(), so that we match the substring
  // "<lambda" rather than the first occurrence of any of its characters.
  if (size_t Pos = CallOpName.find("<lambda"); Pos != StringRef::npos)
    ImplName = ("?__impl@" + CallOpName.drop_front(Pos)).str();
  else
    ImplName = ("__impl" + CallOpName).str();

  llvm::Function *Fn = CallOpFn->getParent()->getFunction(ImplName);
  if (!Fn) {
    Fn = llvm::Function::Create(CGM.getTypes().GetFunctionType(**ImplFnInfo),
                                llvm::GlobalValue::InternalLinkage, ImplName,
                                CGM.getModule());
    CGM.SetInternalFunctionAttributes(CallOp, Fn, **ImplFnInfo);

    GlobalDecl GD(CallOp);
    const auto *D = cast<FunctionDecl>(GD.getDecl());
    CodeGenFunction(CGM).GenerateCode(GD, Fn, **ImplFnInfo);
    CGM.SetLLVMFunctionAttributesForDefinition(D, Fn);
  }
  *ImplFn = Fn;
}
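// (Illustrative note) The inalloca machinery above is only exercised on
// targets that pass arguments through an inalloca block (i.e. 32-bit
// Windows), e.g. for a hypothetical
//   struct NonTrivial { NonTrivial(const NonTrivial &); int V; };
//   auto *FP = +[](NonTrivial Arg) { return Arg.V; };
// The static invoker cannot simply re-forward its inalloca argument block to
// operator(), so both are emitted as thunks that delegate to the shared
// "__impl" function created here.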