//===--- CGClass.cpp - Emit LLVM Code for C++ classes -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CGBlocks.h"
#include "CGCXXABI.h"
#include "CGDebugInfo.h"
#include "CGRecordLayout.h"
#include "CodeGenFunction.h"
#include "TargetInfo.h"
#include "clang/AST/Attr.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/Basic/TargetBuiltins.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
#include "llvm/Transforms/Utils/SanitizerStats.h"

using namespace clang;
using namespace CodeGen;

/// Return the best known alignment for an unknown pointer to a
/// particular class.
CharUnits CodeGenModule::getClassPointerAlignment(const CXXRecordDecl *RD) {
  if (!RD->hasDefinition())
    return CharUnits::One(); // Hopefully won't be used anywhere.

  auto &layout = getContext().getASTRecordLayout(RD);

  // If the class is final, then we know that the pointer points to an
  // object of that type and can use the full alignment.
  if (RD->isEffectivelyFinal())
    return layout.getAlignment();

  // Otherwise, we have to assume it could be a subclass.
  return layout.getNonVirtualAlignment();
}

/// Return the smallest possible amount of storage that might be allocated
/// starting from the beginning of an object of a particular class.
///
/// This may be smaller than sizeof(RD) if RD has virtual base classes.
CharUnits CodeGenModule::getMinimumClassObjectSize(const CXXRecordDecl *RD) {
  if (!RD->hasDefinition())
    return CharUnits::One();

  auto &layout = getContext().getASTRecordLayout(RD);

  // If the class is final, then we know that the pointer points to an
  // object of that type and can use its full size.
  if (RD->isEffectivelyFinal())
    return layout.getSize();

  // Otherwise, we have to assume it could be a subclass.
  return std::max(layout.getNonVirtualSize(), CharUnits::One());
}

/// Return the best known alignment for a pointer to a virtual base,
/// given the alignment of a pointer to the derived class.
CharUnits CodeGenModule::getVBaseAlignment(CharUnits actualDerivedAlign,
                                           const CXXRecordDecl *derivedClass,
                                           const CXXRecordDecl *vbaseClass) {
  // The basic idea here is that an underaligned derived pointer might
  // indicate an underaligned base pointer.
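  // For example, if a derived pointer is known to be only 4-byte aligned
  // while the vbase would normally demand 16-byte alignment, the vbase
  // subobject can only be assumed to be 4-byte aligned as well; the
  // min-combining in getDynamicOffsetAlignment below accounts for this.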

  assert(vbaseClass->isCompleteDefinition());
  auto &baseLayout = getContext().getASTRecordLayout(vbaseClass);
  CharUnits expectedVBaseAlign = baseLayout.getNonVirtualAlignment();

  return getDynamicOffsetAlignment(actualDerivedAlign, derivedClass,
                                   expectedVBaseAlign);
}

CharUnits
CodeGenModule::getDynamicOffsetAlignment(CharUnits actualBaseAlign,
                                         const CXXRecordDecl *baseDecl,
                                         CharUnits expectedTargetAlign) {
  // If the base is an incomplete type (which is, alas, possible with
  // member pointers), be pessimistic.
  if (!baseDecl->isCompleteDefinition())
    return std::min(actualBaseAlign, expectedTargetAlign);

  auto &baseLayout = getContext().getASTRecordLayout(baseDecl);
  CharUnits expectedBaseAlign = baseLayout.getNonVirtualAlignment();

  // If the class is properly aligned, assume the target offset is, too.
  //
  // This actually isn't necessarily the right thing to do --- if the
  // class is a complete object, but it's only properly aligned for a
  // base subobject, then the alignments of things relative to it are
  // probably off as well.  (Note that this requires the alignment of
  // the target to be greater than the NV alignment of the derived
  // class.)
  //
  // However, our approach to this kind of under-alignment can only
  // ever be best effort; after all, we're never going to propagate
  // alignments through variables or parameters.  Note, in particular,
  // that constructing a polymorphic type in an address that's less
  // than pointer-aligned will generally trap in the constructor,
  // unless we someday add some sort of attribute to change the
  // assumed alignment of 'this'.  So our goal here is pretty much
  // just to allow the user to explicitly say that a pointer is
  // under-aligned and then safely access its fields and vtables.
  if (actualBaseAlign >= expectedBaseAlign) {
    return expectedTargetAlign;
  }

  // Otherwise, we might be offset by an arbitrary multiple of the
  // actual alignment.  The correct adjustment is to take the min of
  // the two alignments.
  return std::min(actualBaseAlign, expectedTargetAlign);
}

Address CodeGenFunction::LoadCXXThisAddress() {
  assert(CurFuncDecl && "loading 'this' without a func declaration?");
  assert(isa<CXXMethodDecl>(CurFuncDecl));

  // Lazily compute CXXThisAlignment.
  if (CXXThisAlignment.isZero()) {
    // Just use the best known alignment for the parent.
    // TODO: if we're currently emitting a complete-object ctor/dtor,
    // we can always use the complete-object alignment.
    auto RD = cast<CXXMethodDecl>(CurFuncDecl)->getParent();
    CXXThisAlignment = CGM.getClassPointerAlignment(RD);
  }

  return Address(LoadCXXThis(), CXXThisAlignment);
}

/// Emit the address of a field using a member data pointer.
///
/// \param E Only used for emergency diagnostics
Address
CodeGenFunction::EmitCXXMemberDataPointerAddress(const Expr *E, Address base,
                                                 llvm::Value *memberPtr,
                                      const MemberPointerType *memberPtrType,
                                                 LValueBaseInfo *BaseInfo,
                                                 TBAAAccessInfo *TBAAInfo) {
  // Ask the ABI to compute the actual address.
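  // (Under the Itanium ABI, for example, a member data pointer is simply
  // the byte offset of the field, so this amounts to an offset off of
  // 'base'.)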
  llvm::Value *ptr =
      CGM.getCXXABI().EmitMemberDataPointerAddress(*this, E, base,
                                                   memberPtr, memberPtrType);

  QualType memberType = memberPtrType->getPointeeType();
  CharUnits memberAlign =
      CGM.getNaturalTypeAlignment(memberType, BaseInfo, TBAAInfo);
  memberAlign =
      CGM.getDynamicOffsetAlignment(base.getAlignment(),
                            memberPtrType->getClass()->getAsCXXRecordDecl(),
                                    memberAlign);
  return Address(ptr, memberAlign);
}

CharUnits CodeGenModule::computeNonVirtualBaseClassOffset(
    const CXXRecordDecl *DerivedClass, CastExpr::path_const_iterator Start,
    CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const ASTContext &Context = getContext();
  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const auto *BaseDecl =
        cast<CXXRecordDecl>(Base->getType()->castAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}

llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
      computeNonVirtualBaseClassOffset(ClassDecl, PathBegin, PathEnd);
  if (Offset.isZero())
    return nullptr;

  llvm::Type *PtrDiffTy =
      Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
Address
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(Address This,
                                                  const CXXRecordDecl *Derived,
                                                  const CXXRecordDecl *Base,
                                                  bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This.getElementType() == ConvertType(Derived));

  // Compute the offset of the virtual base.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  Address V = This;
  if (!Offset.isZero()) {
    V = Builder.CreateElementBitCast(V, Int8Ty);
    V = Builder.CreateConstInBoundsByteGEP(V, Offset);
  }
  V = Builder.CreateElementBitCast(V, ConvertType(Base));

  return V;
}

static Address
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, Address addr,
                                CharUnits nonVirtualOffset,
                                llvm::Value *virtualOffset,
                                const CXXRecordDecl *derivedClass,
                                const CXXRecordDecl *nearestVBase) {
  // Assert that we have something to do.
  assert(!nonVirtualOffset.isZero() || virtualOffset != nullptr);

  // Compute the offset from the static and dynamic components.
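  // The non-virtual component is a compile-time constant; the virtual
  // component, when present, is a value the caller loaded from the vtable
  // at runtime, so combining the two requires an add instruction.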
  llvm::Value *baseOffset;
  if (!nonVirtualOffset.isZero()) {
    llvm::Type *OffsetType =
        (CGF.CGM.getTarget().getCXXABI().isItaniumFamily() &&
         CGF.CGM.getItaniumVTableContext().isRelativeLayout())
            ? CGF.Int32Ty
            : CGF.PtrDiffTy;
    baseOffset =
        llvm::ConstantInt::get(OffsetType, nonVirtualOffset.getQuantity());
    if (virtualOffset) {
      baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
    }
  } else {
    baseOffset = virtualOffset;
  }

  // Apply the base offset.
  llvm::Value *ptr = addr.getPointer();
  unsigned AddrSpace = ptr->getType()->getPointerAddressSpace();
  ptr = CGF.Builder.CreateBitCast(ptr, CGF.Int8Ty->getPointerTo(AddrSpace));
  ptr = CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, ptr, baseOffset, "add.ptr");

  // If we have a virtual component, the alignment of the result will
  // be relative only to the known alignment of that vbase.
  CharUnits alignment;
  if (virtualOffset) {
    assert(nearestVBase && "virtual offset without vbase?");
    alignment = CGF.CGM.getVBaseAlignment(addr.getAlignment(),
                                          derivedClass, nearestVBase);
  } else {
    alignment = addr.getAlignment();
  }
  alignment = alignment.alignmentAtOffset(nonVirtualOffset);

  return Address(ptr, alignment);
}

Address CodeGenFunction::GetAddressOfBaseClass(
    Address Value, const CXXRecordDecl *Derived,
    CastExpr::path_const_iterator PathBegin,
    CastExpr::path_const_iterator PathEnd, bool NullCheckValue,
    SourceLocation Loc) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = nullptr;

  // Sema has done some convenient canonicalization here: if the
  // access path involved any virtual steps, the conversion path will
  // *start* with a step down to the correct virtual base subobject,
  // and hence will not require any further steps.
  if ((*Start)->isVirtual()) {
    VBase = cast<CXXRecordDecl>(
        (*Start)->getType()->castAs<RecordType>()->getDecl());
    ++Start;
  }

  // Compute the static offset of the ultimate destination within its
  // allocating subobject (the virtual base, if there is one, or else
  // the "complete" object that we see).
  CharUnits NonVirtualOffset = CGM.computeNonVirtualBaseClassOffset(
      VBase ? VBase : Derived, Start, PathEnd);

  // If there's a virtual step, we can sometimes "devirtualize" it.
  // For now, that's limited to when the derived type is final.
  // TODO: "devirtualize" this for accesses to known-complete objects.
  if (VBase && Derived->hasAttr<FinalAttr>()) {
    const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
    CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
    NonVirtualOffset += vBaseOffset;
    VBase = nullptr; // we no longer have a virtual step
  }

  // Get the base pointer type.
  llvm::Type *BasePtrTy =
      ConvertType((PathEnd[-1])->getType())
          ->getPointerTo(Value.getType()->getPointerAddressSpace());

  QualType DerivedTy = getContext().getRecordType(Derived);
  CharUnits DerivedAlign = CGM.getClassPointerAlignment(Derived);

  // If the static offset is zero and we don't have a virtual step,
  // just do a bitcast; null checks are unnecessary.
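  // (Adding a zero offset leaves a null pointer null, so the bitcast below
  // preserves nullness without an explicit check.)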
  if (NonVirtualOffset.isZero() && !VBase) {
    if (sanitizePerformTypeCheck()) {
      SanitizerSet SkippedChecks;
      SkippedChecks.set(SanitizerKind::Null, !NullCheckValue);
      EmitTypeCheck(TCK_Upcast, Loc, Value.getPointer(),
                    DerivedTy, DerivedAlign, SkippedChecks);
    }
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *origBB = nullptr;
  llvm::BasicBlock *endBB = nullptr;

  // Skip over the offset (and the vtable load) if we're supposed to
  // null-check the pointer.
  if (NullCheckValue) {
    origBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
    endBB = createBasicBlock("cast.end");

    llvm::Value *isNull = Builder.CreateIsNull(Value.getPointer());
    Builder.CreateCondBr(isNull, endBB, notNullBB);
    EmitBlock(notNullBB);
  }

  if (sanitizePerformTypeCheck()) {
    SanitizerSet SkippedChecks;
    SkippedChecks.set(SanitizerKind::Null, true);
    EmitTypeCheck(VBase ? TCK_UpcastToVirtualBase : TCK_Upcast, Loc,
                  Value.getPointer(), DerivedTy, DerivedAlign, SkippedChecks);
  }

  // Compute the virtual offset.
  llvm::Value *VirtualOffset = nullptr;
  if (VBase) {
    VirtualOffset =
        CGM.getCXXABI().GetVirtualBaseClassOffset(*this, Value, Derived,
                                                  VBase);
  }

  // Apply both offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
                                          VirtualOffset, Derived, VBase);

  // Cast to the destination type.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  // Build a phi if we needed a null check.
  if (NullCheckValue) {
    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    Builder.CreateBr(endBB);
    EmitBlock(endBB);

    llvm::PHINode *PHI = Builder.CreatePHI(BasePtrTy, 2, "cast.result");
    PHI->addIncoming(Value.getPointer(), notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(BasePtrTy), origBB);
    Value = Address(PHI, Value.getAlignment());
  }

  return Value;
}

Address
CodeGenFunction::GetAddressOfDerivedClass(Address BaseAddr,
                                          const CXXRecordDecl *Derived,
                                       CastExpr::path_const_iterator PathBegin,
                                       CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
      getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  unsigned AddrSpace =
      BaseAddr.getPointer()->getType()->getPointerAddressSpace();
  llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo(AddrSpace);

  llvm::Value *NonVirtualOffset =
      CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(BaseAddr, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = nullptr;
  llvm::BasicBlock *CastNotNull = nullptr;
  llvm::BasicBlock *CastEnd = nullptr;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(BaseAddr.getPointer());
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
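  // A derived-to-base conversion adds the base-class offset; going from a
  // base back to the derived class therefore subtracts it, which is why the
  // constant is negated before the GEP below.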
  llvm::Value *Value = Builder.CreateBitCast(BaseAddr.getPointer(), Int8PtrTy);
  Value = Builder.CreateInBoundsGEP(
      Int8Ty, Value, Builder.CreateNeg(NonVirtualOffset), "sub.ptr");

  // Just cast.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  // Produce a PHI if we had a null-check.
  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);
    Value = PHI;
  }

  return Address(Value, CGM.getClassPointerAlignment(Derived));
}

llvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD,
                                              bool ForVirtualBase,
                                              bool Delegating) {
  if (!CGM.getCXXABI().NeedsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return nullptr;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CurCodeDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  uint64_t SubVTTIndex;

  if (Delegating) {
    // If this is a delegating constructor call, just load the VTT.
    return LoadCXXVTT();
  } else if (RD == Base) {
    // If the record matches the base, this is the complete ctor/dtor
    // variant calling the base variant in a class with virtual bases.
    assert(!CGM.getCXXABI().NeedsVTTParameter(CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    llvm::Value *VTT = LoadCXXVTT();
    return Builder.CreateConstInBoundsGEP1_64(VoidPtrTy, VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    llvm::GlobalValue *VTT = CGM.getVTables().GetAddrOfVTT(RD);
    return Builder.CreateConstInBoundsGEP2_64(
        VTT->getValueType(), VTT, 0, SubVTTIndex);
  }
}

namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor final : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
        : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const CXXRecordDecl *DerivedClass =
          cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      // We are already inside a destructor, so presumably the object being
      // destroyed should have the expected type.
      QualType ThisTy = D->getThisObjectType();
      Address Addr =
          CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThisAddress(),
                                                    DerivedClass, BaseClass,
                                                    BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual,
                                /*Delegating=*/false, Addr, ThisTy);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker
      : ConstEvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef ConstEvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(const ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code?  If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(const CXXThisExpr *E) { UsesThis = true; }
  };
} // end anonymous namespace

static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(Init);
  return Checker.UsesThis;
}

static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  Address ThisPtr = CGF.LoadCXXThisAddress();

  const Type *BaseType = BaseInit->getBaseClass();
  const auto *BaseClassDecl =
      cast<CXXRecordDecl>(BaseType->castAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  Address V =
      CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                                BaseClassDecl,
                                                isBaseVirtual);
  AggValueSlot AggSlot =
      AggValueSlot::forAddr(
          V, Qualifiers(),
          AggValueSlot::IsDestructed,
          AggValueSlot::DoesNotNeedGCBarriers,
          AggValueSlot::IsNotAliased,
          CGF.getOverlapForBaseInit(ClassDecl, BaseClassDecl, isBaseVirtual));

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOpts().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}

static bool isMemcpyEquivalentSpecialMember(const CXXMethodDecl *D) {
  auto *CD = dyn_cast<CXXConstructorDecl>(D);
  if (!(CD && CD->isCopyOrMoveConstructor()) &&
      !D->isCopyAssignmentOperator() && !D->isMoveAssignmentOperator())
    return false;

  // We can emit a memcpy for a trivial copy or move constructor/assignment.
  if (D->isTrivial() && !D->getParent()->mayInsertExtraPadding())
    return true;

  // We *must* emit a memcpy for a defaulted union copy or move op.
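  // (A union cannot be copied member-wise, since which member is active
  // isn't known statically; the defaulted operation copies the object
  // representation instead.)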
  if (D->getParent()->isUnion() && D->isDefaulted())
    return true;

  return false;
}

static void EmitLValueForAnyFieldInitialization(CodeGenFunction &CGF,
                                                CXXCtorInitializer *MemberInit,
                                                LValue &LHS) {
  FieldDecl *Field = MemberInit->getAnyMember();
  if (MemberInit->isIndirectMemberInitializer()) {
    // If we are initializing an anonymous union field, drill down to
    // the field.
    IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
    for (const auto *I : IndirectField->chain())
      LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(I));
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
  }
}

static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  ApplyDebugLocation Loc(CGF, MemberInit->getSourceLocation());
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
  LValue LHS;

  // If a base constructor is being emitted, create an LValue that has the
  // non-virtual alignment.
  if (CGF.CurGD.getCtorType() == Ctor_Base)
    LHS = CGF.MakeNaturalAlignPointeeAddrLValue(ThisPtr, RecordTy);
  else
    LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);

  EmitLValueForAnyFieldInitialization(CGF, MemberInit, LHS);

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isDefaulted() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (CE && isMemcpyEquivalentSpecialMember(CE->getConstructor()))) {
      unsigned SrcArgIndex =
          CGF.CGM.getCXXABI().getSrcArgforCopyCtor(Constructor, Args);
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS, Src, FieldType,
                            CGF.getOverlapForFieldInit(Field),
                            LHS.isVolatileQualified());
      // Ensure that we destroy the objects if an exception is thrown later in
      // the constructor.
      QualType::DestructionKind dtorKind = FieldType.isDestructedType();
      if (CGF.needsEHCleanup(dtorKind))
        CGF.pushEHDestroy(dtorKind, LHS.getAddress(CGF), FieldType);
      return;
    }
  }

  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit());
}

void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS,
                                              Expr *Init) {
  QualType FieldType = Field->getType();
  switch (getEvaluationKind(FieldType)) {
  case TEK_Scalar:
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
    break;
  case TEK_Complex:
    EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true);
    break;
  case TEK_Aggregate: {
    AggValueSlot Slot = AggValueSlot::forLValue(
        LHS, *this, AggValueSlot::IsDestructed,
        AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased,
        getOverlapForFieldInit(Field), AggValueSlot::IsNotZeroed,
        // Checks are made by the code that calls the constructor.
        AggValueSlot::IsSanitizerChecked);
    EmitAggExpr(Init, Slot);
    break;
  }
  }

  // Ensure that we destroy this object if an exception is thrown
  // later in the constructor.
  QualType::DestructionKind dtorKind = FieldType.isDestructedType();
  if (needsEHCleanup(dtorKind))
    pushEHDestroy(dtorKind, LHS.getAddress(*this), FieldType);
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
bool CodeGenFunction::IsConstructorDelegationValid(
    const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers.  This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always exclude this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->castAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}

// Emit code in ctor (Prologue==true) or dtor (Prologue==false)
// to poison the extra field paddings inserted under
// -fsanitize-address-field-padding=1|2.
void CodeGenFunction::EmitAsanPrologueOrEpilogue(bool Prologue) {
  ASTContext &Context = getContext();
  const CXXRecordDecl *ClassDecl =
      Prologue ? cast<CXXConstructorDecl>(CurGD.getDecl())->getParent()
               : cast<CXXDestructorDecl>(CurGD.getDecl())->getParent();
  if (!ClassDecl->mayInsertExtraPadding()) return;

  struct SizeAndOffset {
    uint64_t Size;
    uint64_t Offset;
  };

  unsigned PtrSize = CGM.getDataLayout().getPointerSizeInBits();
  const ASTRecordLayout &Info = Context.getASTRecordLayout(ClassDecl);

  // Populate sizes and offsets of fields.
  SmallVector<SizeAndOffset, 16> SSV(Info.getFieldCount());
  for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i)
    SSV[i].Offset =
        Context.toCharUnitsFromBits(Info.getFieldOffset(i)).getQuantity();

  size_t NumFields = 0;
  for (const auto *Field : ClassDecl->fields()) {
    const FieldDecl *D = Field;
    auto FieldInfo = Context.getTypeInfoInChars(D->getType());
    CharUnits FieldSize = FieldInfo.Width;
    assert(NumFields < SSV.size());
    SSV[NumFields].Size = D->isBitField() ? 0 : FieldSize.getQuantity();
    NumFields++;
  }
  assert(NumFields == SSV.size());
  if (SSV.size() <= 1) return;

  // We will insert calls to __asan_* run-time functions.
  // LLVM AddressSanitizer pass may decide to inline them later.
  llvm::Type *Args[2] = {IntPtrTy, IntPtrTy};
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, Args, false);
  llvm::FunctionCallee F = CGM.CreateRuntimeFunction(
      FTy, Prologue ? "__asan_poison_intra_object_redzone"
                    : "__asan_unpoison_intra_object_redzone");

  llvm::Value *ThisPtr = LoadCXXThis();
  ThisPtr = Builder.CreatePtrToInt(ThisPtr, IntPtrTy);
  uint64_t TypeSize = Info.getNonVirtualSize().getQuantity();
  // For each field check if it has sufficient padding,
  // if so (un)poison it with a call.
  for (size_t i = 0; i < SSV.size(); i++) {
    uint64_t AsanAlignment = 8;
    uint64_t NextField = i == SSV.size() - 1 ? TypeSize : SSV[i + 1].Offset;
    uint64_t PoisonSize = NextField - SSV[i].Offset - SSV[i].Size;
    uint64_t EndOffset = SSV[i].Offset + SSV[i].Size;
    if (PoisonSize < AsanAlignment || !SSV[i].Size ||
        (NextField % AsanAlignment) != 0)
      continue;
    Builder.CreateCall(
        F, {Builder.CreateAdd(ThisPtr, Builder.getIntN(PtrSize, EndOffset)),
            Builder.getIntN(PtrSize, PoisonSize)});
  }
}

/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  EmitAsanPrologueOrEpilogue(true);
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  assert((CGM.getTarget().getCXXABI().hasConstructorVariants() ||
          CtorType == Ctor_Complete) &&
         "can only generate complete ctor for this ABI");

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
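  // That is, for a class with no virtual bases, emit the complete-object
  // constructor as nothing more than a call to the base-object constructor
  // with the same arguments.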
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
      CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args, Ctor->getEndLoc());
    return;
  }

  const FunctionDecl *Definition = nullptr;
  Stmt *Body = Ctor->getBody(Definition);
  assert(Definition == Ctor && "emitting wrong constructor body");

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  incrementProfileCounter(Body);

  RunCleanupsScope RunCleanups(*this);

  // TODO: in restricted cases, we can emit the vbase initializers of
  // a complete ctor and then delegate to the base ctor.

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  RunCleanups.ForceCleanup();

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

namespace {
  /// RAII object to indicate that codegen is copying the value representation
  /// instead of the object representation. Useful when copying a struct or
  /// class which has uninitialized members and we're only performing
  /// lvalue-to-rvalue conversion on the object but not its members.
  class CopyingValueRepresentation {
  public:
    explicit CopyingValueRepresentation(CodeGenFunction &CGF)
        : CGF(CGF), OldSanOpts(CGF.SanOpts) {
      CGF.SanOpts.set(SanitizerKind::Bool, false);
      CGF.SanOpts.set(SanitizerKind::Enum, false);
    }
    ~CopyingValueRepresentation() {
      CGF.SanOpts = OldSanOpts;
    }
  private:
    CodeGenFunction &CGF;
    SanitizerSet OldSanOpts;
  };
} // end anonymous namespace

namespace {
  class FieldMemcpyizer {
  public:
    FieldMemcpyizer(CodeGenFunction &CGF, const CXXRecordDecl *ClassDecl,
                    const VarDecl *SrcRec)
        : CGF(CGF), ClassDecl(ClassDecl), SrcRec(SrcRec),
          RecLayout(CGF.getContext().getASTRecordLayout(ClassDecl)),
          FirstField(nullptr), LastField(nullptr), FirstFieldOffset(0),
          LastFieldOffset(0), LastAddedFieldIndex(0) {}

    bool isMemcpyableField(FieldDecl *F) const {
      // Never memcpy fields when we are adding poisoned paddings.
      if (CGF.getContext().getLangOpts().SanitizeAddressFieldPadding)
        return false;
      Qualifiers Qual = F->getType().getQualifiers();
      if (Qual.hasVolatile() || Qual.hasObjCLifetime())
        return false;
      return true;
    }

    void addMemcpyableField(FieldDecl *F) {
      if (F->isZeroSize(CGF.getContext()))
        return;
      if (!FirstField)
        addInitialField(F);
      else
        addNextField(F);
    }

    CharUnits getMemcpySize(uint64_t FirstByteOffset) const {
      ASTContext &Ctx = CGF.getContext();
      unsigned LastFieldSize =
          LastField->isBitField()
              ? LastField->getBitWidthValue(Ctx)
              : Ctx.toBits(
                    Ctx.getTypeInfoDataSizeInChars(LastField->getType()).Width);
      uint64_t MemcpySizeBits = LastFieldOffset + LastFieldSize -
                                FirstByteOffset + Ctx.getCharWidth() - 1;
      CharUnits MemcpySize = Ctx.toCharUnitsFromBits(MemcpySizeBits);
      return MemcpySize;
    }

    void emitMemcpy() {
      // Give the subclass a chance to bail out if it feels the memcpy isn't
      // worth it (e.g. hasn't aggregated enough data).
      if (!FirstField) {
        return;
      }

      uint64_t FirstByteOffset;
      if (FirstField->isBitField()) {
        const CGRecordLayout &RL =
            CGF.getTypes().getCGRecordLayout(FirstField->getParent());
        const CGBitFieldInfo &BFInfo = RL.getBitFieldInfo(FirstField);
        // FirstFieldOffset is not appropriate for bitfields,
        // we need to use the storage offset instead.
        FirstByteOffset = CGF.getContext().toBits(BFInfo.StorageOffset);
      } else {
        FirstByteOffset = FirstFieldOffset;
      }

      CharUnits MemcpySize = getMemcpySize(FirstByteOffset);
      QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
      Address ThisPtr = CGF.LoadCXXThisAddress();
      LValue DestLV = CGF.MakeAddrLValue(ThisPtr, RecordTy);
      LValue Dest = CGF.EmitLValueForFieldInitialization(DestLV, FirstField);
      llvm::Value *SrcPtr =
          CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(SrcRec));
      LValue SrcLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(SrcLV, FirstField);

      emitMemcpyIR(
          Dest.isBitField() ? Dest.getBitFieldAddress() : Dest.getAddress(CGF),
          Src.isBitField() ? Src.getBitFieldAddress() : Src.getAddress(CGF),
          MemcpySize);
      reset();
    }

    void reset() {
      FirstField = nullptr;
    }

  protected:
    CodeGenFunction &CGF;
    const CXXRecordDecl *ClassDecl;

  private:
    void emitMemcpyIR(Address DestPtr, Address SrcPtr, CharUnits Size) {
      llvm::PointerType *DPT = DestPtr.getType();
      llvm::Type *DBP =
          llvm::Type::getInt8PtrTy(CGF.getLLVMContext(),
                                   DPT->getAddressSpace());
      DestPtr = CGF.Builder.CreateBitCast(DestPtr, DBP);

      llvm::PointerType *SPT = SrcPtr.getType();
      llvm::Type *SBP =
          llvm::Type::getInt8PtrTy(CGF.getLLVMContext(),
                                   SPT->getAddressSpace());
      SrcPtr = CGF.Builder.CreateBitCast(SrcPtr, SBP);

      CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, Size.getQuantity());
    }

    void addInitialField(FieldDecl *F) {
      FirstField = F;
      LastField = F;
      FirstFieldOffset = RecLayout.getFieldOffset(F->getFieldIndex());
      LastFieldOffset = FirstFieldOffset;
      LastAddedFieldIndex = F->getFieldIndex();
    }

    void addNextField(FieldDecl *F) {
      // For the most part, the following invariant will hold:
      //   F->getFieldIndex() == LastAddedFieldIndex + 1
      // The one exception is that Sema won't add a copy-initializer for an
      // unnamed bitfield, which will show up here as a gap in the sequence.
      assert(F->getFieldIndex() >= LastAddedFieldIndex + 1 &&
             "Cannot aggregate fields out of order.");
      LastAddedFieldIndex = F->getFieldIndex();

      // The 'first' and 'last' fields are chosen by offset, rather than field
      // index. This allows the code to support bitfields, as well as regular
      // fields.
      uint64_t FOffset = RecLayout.getFieldOffset(F->getFieldIndex());
      if (FOffset < FirstFieldOffset) {
        FirstField = F;
        FirstFieldOffset = FOffset;
      } else if (FOffset >= LastFieldOffset) {
        LastField = F;
        LastFieldOffset = FOffset;
      }
    }

    const VarDecl *SrcRec;
    const ASTRecordLayout &RecLayout;
    FieldDecl *FirstField;
    FieldDecl *LastField;
    uint64_t FirstFieldOffset, LastFieldOffset;
    unsigned LastAddedFieldIndex;
  };

  class ConstructorMemcpyizer : public FieldMemcpyizer {
  private:
    /// Get source argument for copy constructor. Returns null if not a copy
    /// constructor.
    static const VarDecl *getTrivialCopySource(CodeGenFunction &CGF,
                                               const CXXConstructorDecl *CD,
                                               FunctionArgList &Args) {
      if (CD->isCopyOrMoveConstructor() && CD->isDefaulted())
        return Args[CGF.CGM.getCXXABI().getSrcArgforCopyCtor(CD, Args)];
      return nullptr;
    }

    // Returns true if a CXXCtorInitializer represents a member initialization
    // that can be rolled into a memcpy.
    bool isMemberInitMemcpyable(CXXCtorInitializer *MemberInit) const {
      if (!MemcpyableCtor)
        return false;
      FieldDecl *Field = MemberInit->getMember();
      assert(Field && "No field for member init.");
      QualType FieldType = Field->getType();
      CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());

      // Bail out on non-memcpyable, not-trivially-copyable members.
      if (!(CE && isMemcpyEquivalentSpecialMember(CE->getConstructor())) &&
          !(FieldType.isTriviallyCopyableType(CGF.getContext()) ||
            FieldType->isReferenceType()))
        return false;

      // Bail out on volatile fields.
      if (!isMemcpyableField(Field))
        return false;

      // Otherwise we're good.
      return true;
    }

  public:
    ConstructorMemcpyizer(CodeGenFunction &CGF, const CXXConstructorDecl *CD,
                          FunctionArgList &Args)
        : FieldMemcpyizer(CGF, CD->getParent(),
                          getTrivialCopySource(CGF, CD, Args)),
          ConstructorDecl(CD),
          MemcpyableCtor(CD->isDefaulted() &&
                         CD->isCopyOrMoveConstructor() &&
                         CGF.getLangOpts().getGC() == LangOptions::NonGC),
          Args(Args) { }

    void addMemberInitializer(CXXCtorInitializer *MemberInit) {
      if (isMemberInitMemcpyable(MemberInit)) {
        AggregatedInits.push_back(MemberInit);
        addMemcpyableField(MemberInit->getMember());
      } else {
        emitAggregatedInits();
        EmitMemberInitializer(CGF, ConstructorDecl->getParent(), MemberInit,
                              ConstructorDecl, Args);
      }
    }

    void emitAggregatedInits() {
      if (AggregatedInits.size() <= 1) {
        // This memcpy is too small to be worthwhile. Fall back on default
        // codegen.
        if (!AggregatedInits.empty()) {
          CopyingValueRepresentation CVR(CGF);
          EmitMemberInitializer(CGF, ConstructorDecl->getParent(),
                                AggregatedInits[0], ConstructorDecl, Args);
          AggregatedInits.clear();
        }
        reset();
        return;
      }

      pushEHDestructors();
      emitMemcpy();
      AggregatedInits.clear();
    }

    void pushEHDestructors() {
      Address ThisPtr = CGF.LoadCXXThisAddress();
      QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
      LValue LHS = CGF.MakeAddrLValue(ThisPtr, RecordTy);

      for (unsigned i = 0; i < AggregatedInits.size(); ++i) {
        CXXCtorInitializer *MemberInit = AggregatedInits[i];
        QualType FieldType = MemberInit->getAnyMember()->getType();
        QualType::DestructionKind dtorKind = FieldType.isDestructedType();
        if (!CGF.needsEHCleanup(dtorKind))
          continue;
        LValue FieldLHS = LHS;
        EmitLValueForAnyFieldInitialization(CGF, MemberInit, FieldLHS);
        CGF.pushEHDestroy(dtorKind, FieldLHS.getAddress(CGF), FieldType);
      }
    }

    void finish() {
      emitAggregatedInits();
    }

  private:
    const CXXConstructorDecl *ConstructorDecl;
    bool MemcpyableCtor;
    FunctionArgList &Args;
    SmallVector<CXXCtorInitializer*, 16> AggregatedInits;
  };

  class AssignmentMemcpyizer : public FieldMemcpyizer {
  private:
    // Returns the memcpyable field copied by the given statement, if one
    // exists. Otherwise returns null.
    FieldDecl *getMemcpyableField(Stmt *S) {
      if (!AssignmentsMemcpyable)
        return nullptr;
      if (BinaryOperator *BO = dyn_cast<BinaryOperator>(S)) {
        // Recognise trivial assignments.
        if (BO->getOpcode() != BO_Assign)
          return nullptr;
        MemberExpr *ME = dyn_cast<MemberExpr>(BO->getLHS());
        if (!ME)
          return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
          return nullptr;
        Stmt *RHS = BO->getRHS();
        if (ImplicitCastExpr *EC = dyn_cast<ImplicitCastExpr>(RHS))
          RHS = EC->getSubExpr();
        if (!RHS)
          return nullptr;
        if (MemberExpr *ME2 = dyn_cast<MemberExpr>(RHS)) {
          if (ME2->getMemberDecl() == Field)
            return Field;
        }
        return nullptr;
      } else if (CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(S)) {
        CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MCE->getCalleeDecl());
        if (!(MD && isMemcpyEquivalentSpecialMember(MD)))
          return nullptr;
        MemberExpr *IOA =
            dyn_cast<MemberExpr>(MCE->getImplicitObjectArgument());
        if (!IOA)
          return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(IOA->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
          return nullptr;
        MemberExpr *Arg0 = dyn_cast<MemberExpr>(MCE->getArg(0));
        if (!Arg0 || Field != dyn_cast<FieldDecl>(Arg0->getMemberDecl()))
          return nullptr;
        return Field;
      } else if (CallExpr *CE = dyn_cast<CallExpr>(S)) {
        FunctionDecl *FD = dyn_cast<FunctionDecl>(CE->getCalleeDecl());
        if (!FD || FD->getBuiltinID() != Builtin::BI__builtin_memcpy)
          return nullptr;
        Expr *DstPtr = CE->getArg(0);
        if (ImplicitCastExpr *DC = dyn_cast<ImplicitCastExpr>(DstPtr))
          DstPtr = DC->getSubExpr();
        UnaryOperator *DUO = dyn_cast<UnaryOperator>(DstPtr);
        if (!DUO || DUO->getOpcode() != UO_AddrOf)
          return nullptr;
        MemberExpr *ME = dyn_cast<MemberExpr>(DUO->getSubExpr());
        if (!ME)
          return nullptr;
        FieldDecl *Field =
            dyn_cast<FieldDecl>(ME->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
          return nullptr;
        Expr *SrcPtr = CE->getArg(1);
        if (ImplicitCastExpr *SC = dyn_cast<ImplicitCastExpr>(SrcPtr))
          SrcPtr = SC->getSubExpr();
        UnaryOperator *SUO = dyn_cast<UnaryOperator>(SrcPtr);
        if (!SUO || SUO->getOpcode() != UO_AddrOf)
          return nullptr;
        MemberExpr *ME2 = dyn_cast<MemberExpr>(SUO->getSubExpr());
        if (!ME2 || Field != dyn_cast<FieldDecl>(ME2->getMemberDecl()))
          return nullptr;
        return Field;
      }

      return nullptr;
    }

    bool AssignmentsMemcpyable;
    SmallVector<Stmt*, 16> AggregatedStmts;

  public:
    AssignmentMemcpyizer(CodeGenFunction &CGF, const CXXMethodDecl *AD,
                         FunctionArgList &Args)
        : FieldMemcpyizer(CGF, AD->getParent(), Args[Args.size() - 1]),
          AssignmentsMemcpyable(CGF.getLangOpts().getGC() ==
                                LangOptions::NonGC) {
      assert(Args.size() == 2);
    }

    void emitAssignment(Stmt *S) {
      FieldDecl *F = getMemcpyableField(S);
      if (F) {
        addMemcpyableField(F);
        AggregatedStmts.push_back(S);
      } else {
        emitAggregatedStmts();
        CGF.EmitStmt(S);
      }
    }

    void emitAggregatedStmts() {
      if (AggregatedStmts.size() <= 1) {
        if (!AggregatedStmts.empty()) {
          CopyingValueRepresentation CVR(CGF);
          CGF.EmitStmt(AggregatedStmts[0]);
        }
        reset();
      }

      emitMemcpy();
      AggregatedStmts.clear();
    }

    void finish() {
      emitAggregatedStmts();
    }
  };
} // end anonymous namespace

static bool isInitializerOfDynamicClass(const CXXCtorInitializer *BaseInit) {
  const Type *BaseType = BaseInit->getBaseClass();
  const auto *BaseClassDecl =
      cast<CXXRecordDecl>(BaseType->castAs<RecordType>()->getDecl());
  return BaseClassDecl->isDynamicClass();
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
                                          E = CD->init_end();

  // Virtual base initializers first, if any. They aren't needed if:
  // - This is a base ctor variant
  // - There are no vbases
  // - The class is abstract, so a complete object of it cannot be constructed
  //
  // The check for an abstract class is necessary because sema may not have
  // marked virtual base destructors referenced.
  bool ConstructVBases = CtorType != Ctor_Base &&
                         ClassDecl->getNumVBases() != 0 &&
                         !ClassDecl->isAbstract();

  // In the Microsoft C++ ABI, there are no constructor variants. Instead, the
  // constructor of a class with virtual bases takes an additional parameter
  // to conditionally construct the virtual bases. Emit that check here.
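  // (Concretely, the flag is the implicit 'most derived' parameter the
  // Microsoft ABI passes to such constructors; when it is false, the handler
  // emitted by the ABI skips the virtual base initializers.)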
  llvm::BasicBlock *BaseCtorContinueBB = nullptr;
  if (ConstructVBases &&
      !CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    BaseCtorContinueBB =
        CGM.getCXXABI().EmitCtorCompleteObjectHandler(*this, ClassDecl);
    assert(BaseCtorContinueBB);
  }

  llvm::Value *const OldThis = CXXThisValue;
  for (; B != E && (*B)->isBaseInitializer() && (*B)->isBaseVirtual(); B++) {
    if (!ConstructVBases)
      continue;
    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        isInitializerOfDynamicClass(*B))
      CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
    EmitBaseInitializer(*this, ClassDecl, *B);
  }

  if (BaseCtorContinueBB) {
    // Complete object handler should continue to the remaining initializers.
    Builder.CreateBr(BaseCtorContinueBB);
    EmitBlock(BaseCtorContinueBB);
  }

  // Then, non-virtual base initializers.
  for (; B != E && (*B)->isBaseInitializer(); B++) {
    assert(!(*B)->isBaseVirtual());

    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        isInitializerOfDynamicClass(*B))
      CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
    EmitBaseInitializer(*this, ClassDecl, *B);
  }

  CXXThisValue = OldThis;

  InitializeVTablePointers(ClassDecl);

  // And finally, initialize class members.
  FieldConstructionScope FCS(*this, LoadCXXThisAddress());
  ConstructorMemcpyizer CM(*this, CD, Args);
  for (; B != E; B++) {
    CXXCtorInitializer *Member = (*B);
    assert(!Member->isBaseInitializer());
    assert(Member->isAnyMemberInitializer() &&
           "Delegating initializer on non-delegating constructor");
    CM.addMemberInitializer(Member);
  }
  CM.finish();
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (const auto *Field : BaseClassDecl->fields())
    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;

  // Check non-virtual bases.
  for (const auto &I : BaseClassDecl->bases()) {
    if (I.isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
        cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (const auto &I : BaseClassDecl->vbases()) {
      const CXXRecordDecl *VirtualBase =
          cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());

  // The destructor for an implicit anonymous union member is never invoked.
  if (FieldClassDecl->isUnion() && FieldClassDecl->isAnonymousStructOrUnion())
    return false;

  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}

/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
static bool CanSkipVTablePointerInitialization(CodeGenFunction &CGF,
                                               const CXXDestructorDecl *Dtor) {
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  if (!ClassDecl->isDynamicClass())
    return true;

  // For a final class, the vtable pointer is known to already point to the
  // class's vtable.
  if (ClassDecl->isEffectivelyFinal())
    return true;

  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  for (const auto *Field : ClassDecl->fields())
    if (!FieldHasTrivialDestructorBody(CGF.getContext(), Field))
      return false;

  return true;
}

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  // For an abstract class, non-base destructors are never used (and can't
  // be emitted in general, because vbase dtors may not have been validated
  // by Sema), but the Itanium ABI doesn't make them optional and Clang may
  // in fact emit references to them from other compilations, so emit them
  // as functions containing a trap instruction.
  if (DtorType != Dtor_Base && Dtor->getParent()->isAbstract()) {
    llvm::CallInst *TrapCall = EmitTrapCall(llvm::Intrinsic::trap);
    TrapCall->setDoesNotReturn();
    TrapCall->setDoesNotThrow();
    Builder.CreateUnreachable();
    Builder.ClearInsertionPoint();
    return;
  }

  Stmt *Body = Dtor->getBody();
  if (Body)
    incrementProfileCounter(Body);

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor.  Do so.
  if (DtorType == Dtor_Deleting) {
    RunCleanupsScope DtorEpilogue(*this);
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    if (HaveInsertPoint()) {
      QualType ThisTy = Dtor->getThisObjectType();
      EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                            /*Delegating=*/false, LoadCXXThisAddress(),
                            ThisTy);
    }
    return;
  }

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
  EmitAsanPrologueOrEpilogue(false);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.  In the Microsoft ABI, we
  // always delegate because we might not have a definition in this TU.
  switch (DtorType) {
  case Dtor_Comdat: llvm_unreachable("not expecting a COMDAT");
  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    assert((Body || getTarget().getCXXABI().isMicrosoft()) &&
           "can't emit a dtor without a body for non-Microsoft ABIs");

    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody) {
      QualType ThisTy = Dtor->getThisObjectType();
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            /*Delegating=*/false, LoadCXXThisAddress(),
                            ThisTy);
      break;
    }

    // Fallthrough: act like we're in the base variant.
    LLVM_FALLTHROUGH;

  case Dtor_Base:
    assert(Body);

    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    if (!CanSkipVTablePointerInitialization(*this, Dtor)) {
      // Insert the llvm.launder.invariant.group intrinsic before initializing
      // the vptrs to cancel any previous assumptions we might have made.
      if (CGM.getCodeGenOpts().StrictVTablePointers &&
          CGM.getCodeGenOpts().OptimizationLevel > 0)
        CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
      InitializeVTablePointers(Dtor->getParent());
    }

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    // -fapple-kext must inline any call to this dtor into
    // the caller's body.
    if (getLangOpts().AppleKext)
      CurFn->addFnAttr(llvm::Attribute::AlwaysInline);

    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args) {
  const CXXMethodDecl *AssignOp = cast<CXXMethodDecl>(CurGD.getDecl());
  const Stmt *RootS = AssignOp->getBody();
  assert(isa<CompoundStmt>(RootS) &&
         "Body of an implicit assignment operator should be compound stmt.");
  const CompoundStmt *RootCS = cast<CompoundStmt>(RootS);

  LexicalScope Scope(*this, RootCS->getSourceRange());

  incrementProfileCounter(RootCS);
  AssignmentMemcpyizer AM(*this, AssignOp, Args);
  for (auto *I : RootCS->body())
    AM.emitAssignment(I);
  AM.finish();
}

namespace {
llvm::Value *LoadThisForDtorDelete(CodeGenFunction &CGF,
                                   const CXXDestructorDecl *DD) {
  if (Expr *ThisArg = DD->getOperatorDeleteThisArg())
    return CGF.EmitScalarExpr(ThisArg);
  return CGF.LoadCXXThis();
}

/// Call the operator delete associated with the current destructor.
struct CallDtorDelete final : EHScopeStack::Cleanup {
  CallDtorDelete() {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
    const CXXRecordDecl *ClassDecl = Dtor->getParent();
    CGF.EmitDeleteCall(Dtor->getOperatorDelete(),
                       LoadThisForDtorDelete(CGF, Dtor),
                       CGF.getContext().getTagDeclType(ClassDecl));
  }
};

void EmitConditionalDtorDeleteCall(CodeGenFunction &CGF,
                                   llvm::Value *ShouldDeleteCondition,
                                   bool ReturnAfterDelete) {
  llvm::BasicBlock *callDeleteBB = CGF.createBasicBlock("dtor.call_delete");
  llvm::BasicBlock *continueBB = CGF.createBasicBlock("dtor.continue");
  llvm::Value *ShouldCallDelete
    = CGF.Builder.CreateIsNull(ShouldDeleteCondition);
  CGF.Builder.CreateCondBr(ShouldCallDelete, continueBB, callDeleteBB);

  CGF.EmitBlock(callDeleteBB);
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  CGF.EmitDeleteCall(Dtor->getOperatorDelete(),
                     LoadThisForDtorDelete(CGF, Dtor),
                     CGF.getContext().getTagDeclType(ClassDecl));
  assert(Dtor->getOperatorDelete()->isDestroyingOperatorDelete() ==
             ReturnAfterDelete &&
         "unexpected value for ReturnAfterDelete");
  if (ReturnAfterDelete)
    CGF.EmitBranchThroughCleanup(CGF.ReturnBlock);
  else
    CGF.Builder.CreateBr(continueBB);

  CGF.EmitBlock(continueBB);
}
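// For context: a "destroying operator delete" (C++20 [class.free]) takes a
// std::destroying_delete_t tag and is itself responsible for destroying the
// object, so no separate destructor call precedes it.  A hypothetical
// example, not taken from this file:
//
//   struct S {
//     ~S();
//     void operator delete(S *p, std::destroying_delete_t);
//   };
//
// For such classes the deleting path returns right after the delete call
// instead of falling through to the normal destruction epilogue.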
struct CallDtorDeleteConditional final : EHScopeStack::Cleanup {
  llvm::Value *ShouldDeleteCondition;

public:
  CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition)
      : ShouldDeleteCondition(ShouldDeleteCondition) {
    assert(ShouldDeleteCondition != nullptr);
  }

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    EmitConditionalDtorDeleteCall(CGF, ShouldDeleteCondition,
                                  /*ReturnAfterDelete*/false);
  }
};

class DestroyField final : public EHScopeStack::Cleanup {
  const FieldDecl *field;
  CodeGenFunction::Destroyer *destroyer;
  bool useEHCleanupForArray;

public:
  DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
               bool useEHCleanupForArray)
      : field(field), destroyer(destroyer),
        useEHCleanupForArray(useEHCleanupForArray) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    // Find the address of the field.
    Address thisValue = CGF.LoadCXXThisAddress();
    QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
    LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
    LValue LV = CGF.EmitLValueForField(ThisLV, field);
    assert(LV.isSimple());

    CGF.emitDestroy(LV.getAddress(CGF), field->getType(), destroyer,
                    flags.isForNormalCleanup() && useEHCleanupForArray);
  }
};

static void EmitSanitizerDtorCallback(CodeGenFunction &CGF, llvm::Value *Ptr,
                                      CharUnits::QuantityType PoisonSize) {
  CodeGenFunction::SanitizerScope SanScope(&CGF);
  // Pass in void pointer and size of region as arguments to runtime
  // function
  llvm::Value *Args[] = {CGF.Builder.CreateBitCast(Ptr, CGF.VoidPtrTy),
                         llvm::ConstantInt::get(CGF.SizeTy, PoisonSize)};

  llvm::Type *ArgTypes[] = {CGF.VoidPtrTy, CGF.SizeTy};

  llvm::FunctionType *FnType =
      llvm::FunctionType::get(CGF.VoidTy, ArgTypes, false);
  llvm::FunctionCallee Fn =
      CGF.CGM.CreateRuntimeFunction(FnType, "__sanitizer_dtor_callback");
  CGF.EmitNounwindRuntimeCall(Fn, Args);
}
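// The runtime entry point built above has, per the FnType constructed there,
// the C signature
//
//   void __sanitizer_dtor_callback(void *ptr, size_t size);
//
// It is part of MSan's use-after-dtor support: for a hypothetical
//
//   struct S { int a; std::string b; ~S() {} };
//
// the cleanups below poison the storage of 'a' and 'b' once the destructor
// body and member destructors have run, so later reads are flagged.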
class SanitizeDtorMembers final : public EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;

public:
  SanitizeDtorMembers(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}

  // Generate function call for handling object poisoning.
  // Disables tail call elimination, to prevent the current stack frame
  // from disappearing from the stack trace.
  void Emit(CodeGenFunction &CGF, Flags flags) override {
    const ASTRecordLayout &Layout =
        CGF.getContext().getASTRecordLayout(Dtor->getParent());

    // Nothing to poison.
    if (Layout.getFieldCount() == 0)
      return;

    // Prevent the current stack frame from disappearing from the stack trace.
    CGF.CurFn->addFnAttr("disable-tail-calls", "true");

    // Construct pointer to region to begin poisoning, and calculate poison
    // size, so that only members declared in this class are poisoned.
    ASTContext &Context = CGF.getContext();

    const RecordDecl *Decl = Dtor->getParent();
    auto Fields = Decl->fields();
    auto IsTrivial = [&](const FieldDecl *F) {
      return FieldHasTrivialDestructorBody(Context, F);
    };

    auto IsZeroSize = [&](const FieldDecl *F) {
      return F->isZeroSize(Context);
    };

    // Poison blocks of fields with trivial destructors, making sure that the
    // block's begin and end do not point to zero-sized fields.  Zero-sized
    // fields don't have correct offsets, so they can't be used to calculate
    // the poisoning range.
    for (auto It = Fields.begin(); It != Fields.end();) {
      It = std::find_if(It, Fields.end(), [&](const FieldDecl *F) {
        return IsTrivial(F) && !IsZeroSize(F);
      });
      if (It == Fields.end())
        break;
      auto Start = It++;
      It = std::find_if(It, Fields.end(), [&](const FieldDecl *F) {
        return !IsTrivial(F) && !IsZeroSize(F);
      });

      // Passing -1 (which wraps to UINT_MAX) poisons through the end of the
      // object; see the getFieldCount() comparison in PoisonMembers.
      PoisonMembers(CGF, (*Start)->getFieldIndex(),
                    It == Fields.end() ? -1 : (*It)->getFieldIndex());
    }
  }

private:
  /// \param layoutStartOffset index of the ASTRecordLayout field to
  ///     start poisoning (inclusive)
  /// \param layoutEndOffset index of the ASTRecordLayout field to
  ///     end poisoning (exclusive)
  void PoisonMembers(CodeGenFunction &CGF, unsigned layoutStartOffset,
                     unsigned layoutEndOffset) {
    ASTContext &Context = CGF.getContext();
    const ASTRecordLayout &Layout =
        Context.getASTRecordLayout(Dtor->getParent());

    // This is the first trivial field, so it should start at a char
    // boundary; still, round up the start offset just in case.
    CharUnits PoisonStart =
        Context.toCharUnitsFromBits(Layout.getFieldOffset(layoutStartOffset) +
                                    Context.getCharWidth() - 1);
    llvm::ConstantInt *OffsetSizePtr =
        llvm::ConstantInt::get(CGF.SizeTy, PoisonStart.getQuantity());

    llvm::Value *OffsetPtr = CGF.Builder.CreateGEP(
        CGF.Int8Ty,
        CGF.Builder.CreateBitCast(CGF.LoadCXXThis(), CGF.Int8PtrTy),
        OffsetSizePtr);

    CharUnits PoisonEnd;
    if (layoutEndOffset >= Layout.getFieldCount()) {
      PoisonEnd = Layout.getNonVirtualSize();
    } else {
      PoisonEnd =
          Context.toCharUnitsFromBits(Layout.getFieldOffset(layoutEndOffset));
    }
    CharUnits PoisonSize = PoisonEnd - PoisonStart;
    if (!PoisonSize.isPositive())
      return;

    EmitSanitizerDtorCallback(CGF, OffsetPtr, PoisonSize.getQuantity());
  }
};

class SanitizeDtorVTable final : public EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;

public:
  SanitizeDtorVTable(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}

  // Generate function call for handling vtable pointer poisoning.
  void Emit(CodeGenFunction &CGF, Flags flags) override {
    assert(Dtor->getParent()->isDynamicClass());
    (void)Dtor;
    ASTContext &Context = CGF.getContext();
    // Poison vtable and vtable ptr if they exist for this class.
    llvm::Value *VTablePtr = CGF.LoadCXXThis();

    CharUnits::QuantityType PoisonSize =
        Context.toCharUnitsFromBits(CGF.PointerWidthInBits).getQuantity();
    // Pass in void pointer and size of region as arguments to runtime
    // function
    EmitSanitizerDtorCallback(CGF, VTablePtr, PoisonSize);
  }
};
} // end anonymous namespace
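// Background for the cleanups pushed below: under the Itanium C++ ABI a
// destructor is emitted in up to three variants -- base (D2: destroys
// non-virtual bases and members), complete (D1: additionally destroys
// virtual bases), and deleting (D0: additionally calls operator delete).
// EnterDtorCleanups pushes exactly the cleanups that distinguish the
// requested variant.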
/// Emit all code that comes at the end of a class's destructor.  This is to
/// call destructors on members and base classes in reverse order of their
/// construction.
///
/// For a deleting destructor, this also handles the case where a destroying
/// operator delete completely overrides the definition.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert((!DD->isTrivial() || DD->hasAttr<DLLExportAttr>()) &&
         "Should not emit dtor epilogue for non-exported trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EnterDtorCleanups");
    if (CXXStructorImplicitParamValue) {
      // If there is an implicit param to the deleting dtor, it's a boolean
      // telling whether this is a deleting destructor.
      if (DD->getOperatorDelete()->isDestroyingOperatorDelete())
        EmitConditionalDtorDeleteCall(*this, CXXStructorImplicitParamValue,
                                      /*ReturnAfterDelete*/true);
      else
        EHStack.pushCleanup<CallDtorDeleteConditional>(
            NormalAndEHCleanup, CXXStructorImplicitParamValue);
    } else {
      if (DD->getOperatorDelete()->isDestroyingOperatorDelete()) {
        const CXXRecordDecl *ClassDecl = DD->getParent();
        EmitDeleteCall(DD->getOperatorDelete(),
                       LoadThisForDtorDelete(*this, DD),
                       getContext().getTagDeclType(ClassDecl));
        EmitBranchThroughCleanup(ReturnBlock);
      } else {
        EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
      }
    }
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {
    // Poison the vtable pointer such that access after the base
    // and member destructors are invoked is invalid.
    if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
        SanOpts.has(SanitizerKind::Memory) && ClassDecl->getNumVBases() &&
        ClassDecl->isPolymorphic())
      EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (const auto &Base : ClassDecl->vbases()) {
      auto *BaseClassDecl =
          cast<CXXRecordDecl>(Base.getType()->castAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;

      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        BaseClassDecl,
                                        /*BaseIsVirtual*/ true);
    }

    return;
  }

  assert(DtorType == Dtor_Base);
  // Poison the vtable pointer if it has no virtual bases, but inherits
  // virtual functions.
  if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
      SanOpts.has(SanitizerKind::Memory) && !ClassDecl->getNumVBases() &&
      ClassDecl->isPolymorphic())
    EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);

  // Destroy non-virtual bases.
  for (const auto &Base : ClassDecl->bases()) {
    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      BaseClassDecl,
                                      /*BaseIsVirtual*/ false);
  }

  // Poison fields such that access after their destructors are
  // invoked, and before the base class destructor runs, is invalid.
  if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
      SanOpts.has(SanitizerKind::Memory))
    EHStack.pushCleanup<SanitizeDtorMembers>(NormalAndEHCleanup, DD);

  // Destroy direct fields.
  for (const auto *Field : ClassDecl->fields()) {
    QualType type = Field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind) continue;

    // Anonymous union members do not have their destructors called.
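    // For example (hypothetical, not from this file), in
    //
    //   struct S { union { std::string s; }; ~S() {} };
    //
    // the anonymous union member 's' is skipped here: the language never
    // implicitly destroys variant members, so no cleanup is pushed for it.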
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion()) continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(cleanupKind, Field,
                                      getDestroyer(dtorKind),
                                      cleanupKind & EHCleanup);
  }
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(
    const CXXConstructorDecl *ctor, const ArrayType *arrayType,
    Address arrayBegin, const CXXConstructExpr *E, bool NewPointerIsChecked,
    bool zeroInitialize) {
  QualType elementType;
  llvm::Value *numElements =
    emitArrayLength(arrayType, elementType, arrayBegin);

  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin, E,
                             NewPointerIsChecked, zeroInitialize);
}
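// For orientation, the overload below emits control flow of roughly this
// shape for a dynamic element count (an illustrative sketch, shown after the
// zero-check branch has been patched; block names match the ones created in
// the code):
//
//     %isempty = icmp eq i64 %n, 0
//     br i1 %isempty, label %arrayctor.cont, label %new.ctorloop
//   new.ctorloop:
//     ; fall through into the loop
//   arrayctor.loop:
//     %cur = phi %T* [ %begin, %new.ctorloop ], [ %next, %arrayctor.loop ]
//     ; ... construct *%cur ...
//     %next = getelementptr inbounds %T, %T* %cur, i64 1
//     %done = icmp eq %T* %next, %end
//     br i1 %done, label %arrayctor.cont, label %arrayctor.loop
//   arrayctor.cont: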
/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may be zero
/// \param arrayBase a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                                 llvm::Value *numElements,
                                                 Address arrayBase,
                                                 const CXXConstructExpr *E,
                                                 bool NewPointerIsChecked,
                                                 bool zeroInitialize) {
  // It's legal for numElements to be zero.  This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays.  There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = nullptr;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

    // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    // Both successors start out as the loop block; the zero branch is
    // patched to point at the continuation block once it exists (see the
    // setSuccessor call at the end of this function).
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }

  // Find the end of the array.
  llvm::Type *elementType = arrayBase.getElementType();
  llvm::Value *arrayBegin = arrayBase.getPointer();
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(
      elementType, arrayBegin, numElements, "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.

  // The alignment of the base, adjusted by the size of a single element,
  // provides a conservative estimate of the alignment of every element.
  // (This assumes we never start tracking offsetted alignments.)
  //
  // Note that these are complete objects and so we don't need to
  // use the non-virtual size or alignment.
  QualType type = getContext().getTypeDeclType(ctor->getParent());
  CharUnits eltAlignment =
    arrayBase.getAlignment()
             .alignmentOfArrayElement(getContext().getTypeSizeInChars(type));
  Address curAddr = Address(cur, eltAlignment);

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(curAddr, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression.  The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, eltAlignment,
                                     *destroyer);
    }
    auto currAVS = AggValueSlot::forAddr(
        curAddr, type.getQualifiers(), AggValueSlot::IsDestructed,
        AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased,
        AggValueSlot::DoesNotOverlap, AggValueSlot::IsNotZeroed,
        NewPointerIsChecked ? AggValueSlot::IsSanitizerChecked
                            : AggValueSlot::IsNotSanitizerChecked);
    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/false,
                           /*Delegating=*/false, currAVS, E);
  }

  // Go to the next element.
  llvm::Value *next = Builder.CreateInBoundsGEP(
      elementType, cur, llvm::ConstantInt::get(SizeTy, 1), "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}

void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       Address addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
                            /*Delegating=*/false, addr, type);
}

void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                             CXXCtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating,
                                             AggValueSlot ThisAVS,
                                             const CXXConstructExpr *E) {
  CallArgList Args;
  Address This = ThisAVS.getAddress();
  LangAS SlotAS = ThisAVS.getQualifiers().getAddressSpace();
  QualType ThisType = D->getThisType();
  LangAS ThisAS = ThisType.getTypePtr()->getPointeeType().getAddressSpace();
  llvm::Value *ThisPtr = This.getPointer();

  if (SlotAS != ThisAS) {
    unsigned TargetThisAS = getContext().getTargetAddressSpace(ThisAS);
    llvm::Type *NewType =
        ThisPtr->getType()->getPointerElementType()->getPointerTo(TargetThisAS);
    ThisPtr = getTargetHooks().performAddrSpaceCast(*this, This.getPointer(),
                                                    ThisAS, SlotAS, NewType);
  }

  // Push the this ptr.
  Args.add(RValue::get(ThisPtr), D->getThisType());

  // If this is a trivial constructor, emit a memcpy now before we lose
  // the alignment information on the argument.
  // FIXME: It would be better to preserve alignment information into CallArg.
  if (isMemcpyEquivalentSpecialMember(D)) {
    assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor");

    const Expr *Arg = E->getArg(0);
    LValue Src = EmitLValue(Arg);
    QualType DestTy = getContext().getTypeDeclType(D->getParent());
    LValue Dest = MakeAddrLValue(This, DestTy);
    EmitAggregateCopyCtor(Dest, Src, ThisAVS.mayOverlap());
    return;
  }

  // Add the rest of the user-supplied arguments.
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
  EvaluationOrder Order = E->isListInitialization()
                              ? EvaluationOrder::ForceLeftToRight
                              : EvaluationOrder::Default;
  EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor(),
               /*ParamsToSkip*/ 0, Order);

  EmitCXXConstructorCall(D, Type, ForVirtualBase, Delegating, This, Args,
                         ThisAVS.mayOverlap(), E->getExprLoc(),
                         ThisAVS.isSanitizerChecked());
}
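// For example (hypothetical, not from this file), given
//
//   struct P { int x, y; };
//   P b(a);   // trivial copy constructor
//
// the memcpy-equivalent path above lowers the construction to an aggregate
// copy (ultimately a memcpy) instead of emitting a call to P's constructor.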
static bool canEmitDelegateCallArgs(CodeGenFunction &CGF,
                                    const CXXConstructorDecl *Ctor,
                                    CXXCtorType Type, CallArgList &Args) {
  // We can't forward a variadic call.
  if (Ctor->isVariadic())
    return false;

  if (CGF.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
    // If the parameters are callee-cleanup, it's not safe to forward.
    for (auto *P : Ctor->parameters())
      if (P->needsDestruction(CGF.getContext()))
        return false;

    // Likewise if they're inalloca.
    const CGFunctionInfo &Info =
        CGF.CGM.getTypes().arrangeCXXConstructorCall(Args, Ctor, Type, 0, 0);
    if (Info.usesInAlloca())
      return false;
  }

  // Anything else should be OK.
  return true;
}

void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                             CXXCtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating,
                                             Address This,
                                             CallArgList &Args,
                                             AggValueSlot::Overlap_t Overlap,
                                             SourceLocation Loc,
                                             bool NewPointerIsChecked) {
  const CXXRecordDecl *ClassDecl = D->getParent();

  if (!NewPointerIsChecked)
    EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall, Loc, This.getPointer(),
                  getContext().getRecordType(ClassDecl), CharUnits::Zero());

  if (D->isTrivial() && D->isDefaultConstructor()) {
    assert(Args.size() == 1 && "trivial default ctor with args");
    return;
  }

  // If this is a trivial constructor, just emit what's needed.  If this is a
  // union copy constructor, we must emit a memcpy, because the AST does not
  // model that copy.
  if (isMemcpyEquivalentSpecialMember(D)) {
    assert(Args.size() == 2 && "unexpected argcount for trivial ctor");

    QualType SrcTy = D->getParamDecl(0)->getType().getNonReferenceType();
    Address Src(Args[1].getRValue(*this).getScalarVal(),
                CGM.getNaturalTypeAlignment(SrcTy));
    LValue SrcLVal = MakeAddrLValue(Src, SrcTy);
    QualType DestTy = getContext().getTypeDeclType(ClassDecl);
    LValue DestLVal = MakeAddrLValue(This, DestTy);
    EmitAggregateCopyCtor(DestLVal, SrcLVal, Overlap);
    return;
  }

  bool PassPrototypeArgs = true;
  // Check whether we can actually emit the constructor before trying to do so.
  if (auto Inherited = D->getInheritedConstructor()) {
    PassPrototypeArgs = getTypes().inheritingCtorHasParams(Inherited, Type);
    if (PassPrototypeArgs && !canEmitDelegateCallArgs(*this, D, Type, Args)) {
      EmitInlinedInheritingCXXConstructorCall(D, Type, ForVirtualBase,
                                              Delegating, Args);
      return;
    }
  }

  // Insert any ABI-specific implicit constructor arguments.
  CGCXXABI::AddedStructorArgCounts ExtraArgs =
      CGM.getCXXABI().addImplicitConstructorArgs(*this, D, Type, ForVirtualBase,
                                                 Delegating, Args);

  // Emit the call.
  llvm::Constant *CalleePtr = CGM.getAddrOfCXXStructor(GlobalDecl(D, Type));
  const CGFunctionInfo &Info = CGM.getTypes().arrangeCXXConstructorCall(
      Args, D, Type, ExtraArgs.Prefix, ExtraArgs.Suffix, PassPrototypeArgs);
  CGCallee Callee = CGCallee::forDirect(CalleePtr, GlobalDecl(D, Type));
  EmitCall(Info, Callee, ReturnValueSlot(), Args, nullptr, false, Loc);
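  // When the conditions below hold (see the comment that follows), the
  // emitted "assumption load" takes roughly this form in IR -- an
  // illustrative sketch, not exact compiler output:
  //
  //   %vtable = load ptr, ptr %this
  //   %cmp = icmp eq ptr %vtable, <address point of the class's vtable>
  //   call void @llvm.assume(i1 %cmp)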
  // Generate vtable assumptions if we're constructing a complete object
  // with a vtable.  We don't do this for base subobjects for two reasons:
  // first, it's incorrect for classes with virtual bases, and second, we're
  // about to overwrite the vptrs anyway.
  // We also have to make sure we can refer to the vtable:
  // - We can refer to the vtable if it's safe to speculatively emit.
  // FIXME: If the vtable is used by the ctor/dtor, or if it is external and
  // we are sure that its definition is not hidden, then we are always safe
  // to refer to it.
  // FIXME: It looks like InstCombine is very inefficient on dealing with
  // assumes.  Make assumption loads require -fstrict-vtable-pointers
  // temporarily.
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      ClassDecl->isDynamicClass() && Type != Ctor_Base &&
      CGM.getCXXABI().canSpeculativelyEmitVTable(ClassDecl) &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    EmitVTableAssumptionLoads(ClassDecl, This);
}

void CodeGenFunction::EmitInheritedCXXConstructorCall(
    const CXXConstructorDecl *D, bool ForVirtualBase, Address This,
    bool InheritedFromVBase, const CXXInheritedCtorInitExpr *E) {
  CallArgList Args;
  CallArg ThisArg(RValue::get(This.getPointer()), D->getThisType());

  // Forward the parameters.
  if (InheritedFromVBase &&
      CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    // Nothing to do; this construction is not responsible for constructing
    // the base class containing the inherited constructor.
    // FIXME: Can we just pass undef's for the remaining arguments if we don't
    // have constructor variants?
    Args.push_back(ThisArg);
  } else if (!CXXInheritedCtorInitExprArgs.empty()) {
    // The inheriting constructor was inlined; just inject its arguments.
    assert(CXXInheritedCtorInitExprArgs.size() >= D->getNumParams() &&
           "wrong number of parameters for inherited constructor call");
    Args = CXXInheritedCtorInitExprArgs;
    Args[0] = ThisArg;
  } else {
    // The inheriting constructor was not inlined.  Emit delegating arguments.
    Args.push_back(ThisArg);
    const auto *OuterCtor = cast<CXXConstructorDecl>(CurCodeDecl);
    assert(OuterCtor->getNumParams() == D->getNumParams());
    assert(!OuterCtor->isVariadic() && "should have been inlined");

    for (const auto *Param : OuterCtor->parameters()) {
      assert(getContext().hasSameUnqualifiedType(
          OuterCtor->getParamDecl(Param->getFunctionScopeIndex())->getType(),
          Param->getType()));
      EmitDelegateCallArg(Args, Param, E->getLocation());

      // Forward __attribute__(pass_object_size).
      if (Param->hasAttr<PassObjectSizeAttr>()) {
        auto *POSParam = SizeArguments[Param];
        assert(POSParam && "missing pass_object_size value for forwarding");
        EmitDelegateCallArg(Args, POSParam, E->getLocation());
      }
    }
  }

  EmitCXXConstructorCall(D, Ctor_Base, ForVirtualBase, /*Delegating*/false,
                         This, Args, AggValueSlot::MayOverlap,
                         E->getLocation(), /*NewPointerIsChecked*/true);
}
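// Context for the inlined case handled below (hypothetical example, not
// taken from this file):
//
//   struct B { B(int); };
//   struct D : B { using B::B; };   // inheriting constructor
//
// When D's inheriting constructor cannot simply forward its prototype
// arguments (e.g. inalloca or callee-cleanup parameters), the call to B(int)
// is emitted inline into D's constructor instead of being delegated.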
void CodeGenFunction::EmitInlinedInheritingCXXConstructorCall(
    const CXXConstructorDecl *Ctor, CXXCtorType CtorType, bool ForVirtualBase,
    bool Delegating, CallArgList &Args) {
  GlobalDecl GD(Ctor, CtorType);
  InlinedInheritingConstructorScope Scope(*this, GD);
  ApplyInlineDebugLocation DebugScope(*this, GD);
  RunCleanupsScope RunCleanups(*this);

  // Save the arguments to be passed to the inherited constructor.
  CXXInheritedCtorInitExprArgs = Args;

  FunctionArgList Params;
  QualType RetType = BuildFunctionArgList(CurGD, Params);
  FnRetTy = RetType;

  // Insert any ABI-specific implicit constructor arguments.
  CGM.getCXXABI().addImplicitConstructorArgs(*this, Ctor, CtorType,
                                             ForVirtualBase, Delegating, Args);

  // Emit a simplified prolog.  We only need to emit the implicit params.
  assert(Args.size() >= Params.size() && "too few arguments for call");
  for (unsigned I = 0, N = Args.size(); I != N; ++I) {
    if (I < Params.size() && isa<ImplicitParamDecl>(Params[I])) {
      const RValue &RV = Args[I].getRValue(*this);
      assert(!RV.isComplex() && "complex indirect params not supported");
      ParamValue Val = RV.isScalar()
                           ? ParamValue::forDirect(RV.getScalarVal())
                           : ParamValue::forIndirect(RV.getAggregateAddress());
      EmitParmDecl(*Params[I], Val, I + 1);
    }
  }

  // Create a return value slot if the ABI implementation wants one.
  // FIXME: This is dumb, we should ask the ABI not to try to set the return
  // value instead.
  if (!RetType->isVoidType())
    ReturnValue = CreateIRTemp(RetType, "retval.inhctor");

  CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
  CXXThisValue = CXXABIThisValue;

  // Directly emit the constructor initializers.
  EmitCtorPrologue(Ctor, CtorType, Params);
}
void CodeGenFunction::EmitVTableAssumptionLoad(const VPtr &Vptr, Address This) {
  llvm::Value *VTableGlobal =
      CGM.getCXXABI().getVTableAddressPoint(Vptr.Base, Vptr.VTableClass);
  if (!VTableGlobal)
    return;

  // We can just use the base offset in the complete class.
  CharUnits NonVirtualOffset = Vptr.Base.getBaseOffset();

  if (!NonVirtualOffset.isZero())
    This =
        ApplyNonVirtualAndVirtualOffset(*this, This, NonVirtualOffset, nullptr,
                                        Vptr.VTableClass, Vptr.NearestVBase);

  llvm::Value *VPtrValue =
      GetVTablePtr(This, VTableGlobal->getType(), Vptr.VTableClass);
  llvm::Value *Cmp =
      Builder.CreateICmpEQ(VPtrValue, VTableGlobal, "cmp.vtables");
  Builder.CreateAssumption(Cmp);
}

void CodeGenFunction::EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl,
                                                Address This) {
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(ClassDecl))
    for (const VPtr &Vptr : getVTablePointers(ClassDecl))
      EmitVTableAssumptionLoad(Vptr, This);
}

void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                                Address This, Address Src,
                                                const CXXConstructExpr *E) {
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This.getPointer()), D->getThisType());

  // Push the src ptr.
  QualType QT = *(FPT->param_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  Src = Builder.CreateBitCast(Src, t);
  Args.add(RValue::get(Src.getPointer()), QT);

  // Skip over first argument (Src).
  EmitCallArgs(Args, FPT, drop_begin(E->arguments(), 1), E->getConstructor(),
               /*ParamsToSkip*/ 1);

  EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase*/false,
                         /*Delegating*/false, This, Args,
                         AggValueSlot::MayOverlap, E->getExprLoc(),
                         /*NewPointerIsChecked*/false);
}

void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args,
                                                SourceLocation Loc) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  Address This = LoadCXXThisAddress();
  DelegateArgs.add(RValue::get(This.getPointer()), (*I)->getType());
  ++I;

  // FIXME: The location of the VTT parameter in the parameter list is
  // specific to the Itanium ABI and shouldn't be hardcoded here.
  if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
    assert(I != E && "cannot skip vtt parameter, already done with args");
    assert((*I)->getType()->isPointerType() &&
           "skipping parameter not of vtt type");
    ++I;
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    // FIXME: per-argument source location
    EmitDelegateCallArg(DelegateArgs, param, Loc);
  }

  EmitCXXConstructorCall(Ctor, CtorType, /*ForVirtualBase=*/false,
                         /*Delegating=*/true, This, DelegateArgs,
                         AggValueSlot::MayOverlap, Loc,
                         /*NewPointerIsChecked=*/true);
}
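// The next two definitions implement C++11 delegating constructors, e.g.
// (hypothetical example, not from this file):
//
//   struct S {
//     S() : S(42) {}   // delegates to S(int)
//     S(int);
//   };
//
// Once the target constructor returns, the object is fully constructed, so
// an EH cleanup must run its destructor if the delegating constructor's own
// body subsequently throws.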
namespace {
struct CallDelegatingCtorDtor final : EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;
  Address Addr;
  CXXDtorType Type;

  CallDelegatingCtorDtor(const CXXDestructorDecl *D, Address Addr,
                         CXXDtorType Type)
      : Dtor(D), Addr(Addr), Type(Type) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    // We are calling the destructor from within the constructor.
    // Therefore, "this" should have the expected type.
    QualType ThisTy = Dtor->getThisObjectType();
    CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
                              /*Delegating=*/true, Addr, ThisTy);
  }
};
} // end anonymous namespace

void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  Address ThisPtr = LoadCXXThisAddress();

  AggValueSlot AggSlot =
    AggValueSlot::forAddr(ThisPtr, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased,
                          AggValueSlot::MayOverlap,
                          AggValueSlot::IsNotZeroed,
                          // Checks are made by the code that calls constructor.
                          AggValueSlot::IsSanitizerChecked);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    CXXDtorType Type =
        CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}

void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            bool Delegating, Address This,
                                            QualType ThisTy) {
  CGM.getCXXABI().EmitDestructorCall(*this, DD, Type, ForVirtualBase,
                                     Delegating, This, ThisTy);
}

namespace {
struct CallLocalDtor final : EHScopeStack::Cleanup {
  const CXXDestructorDecl *Dtor;
  Address Addr;
  QualType Ty;

  CallLocalDtor(const CXXDestructorDecl *D, Address Addr, QualType Ty)
      : Dtor(D), Addr(Addr), Ty(Ty) {}

  void Emit(CodeGenFunction &CGF, Flags flags) override {
    CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                              /*ForVirtualBase=*/false,
                              /*Delegating=*/false, Addr, Ty);
  }
};
} // end anonymous namespace

void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            QualType T, Address Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr, T);
}

void CodeGenFunction::PushDestructorCleanup(QualType T, Address Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, T, Addr);
}
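// A note on the vptr stores emitted below: for a virtual base, the
// subobject's offset depends on the most-derived type, so the vptr slot must
// be located dynamically.  Hypothetical example (not from this file):
//
//   struct A { virtual ~A(); };
//   struct B : virtual A {};
//
// While B's constructor runs, A's subobject is not at one fixed offset from
// 'this' across all classes derived from B, so its vptr slot is found via
// the virtual base offset read out of the vtable.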
void CodeGenFunction::InitializeVTablePointer(const VPtr &Vptr) {
  // Compute the address point.
  llvm::Value *VTableAddressPoint =
      CGM.getCXXABI().getVTableAddressPointInStructor(
          *this, Vptr.VTableClass, Vptr.Base, Vptr.NearestVBase);

  if (!VTableAddressPoint)
    return;

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = nullptr;
  CharUnits NonVirtualOffset = CharUnits::Zero();

  if (CGM.getCXXABI().isVirtualOffsetNeededForVTableField(*this, Vptr)) {
    // We need to use the virtual base offset offset (the offset, within the
    // vtable, of the virtual base offset) because the virtual base might
    // have a different offset in the most derived class.

    VirtualOffset = CGM.getCXXABI().GetVirtualBaseClassOffset(
        *this, LoadCXXThisAddress(), Vptr.VTableClass, Vptr.NearestVBase);
    NonVirtualOffset = Vptr.OffsetFromNearestVBase;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Vptr.Base.getBaseOffset();
  }

  // Apply the offsets.
  Address VTableField = LoadCXXThisAddress();
  unsigned ThisAddrSpace =
      VTableField.getPointer()->getType()->getPointerAddressSpace();

  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(
        *this, VTableField, NonVirtualOffset, VirtualOffset, Vptr.VTableClass,
        Vptr.NearestVBase);

  // Finally, store the address point.  Use the same LLVM types as the field to
  // support optimization.
  unsigned GlobalsAS = CGM.getDataLayout().getDefaultGlobalsAddressSpace();
  unsigned ProgAS = CGM.getDataLayout().getProgramAddressSpace();
  llvm::Type *VTablePtrTy =
      llvm::FunctionType::get(CGM.Int32Ty, /*isVarArg=*/true)
          ->getPointerTo(ProgAS)
          ->getPointerTo(GlobalsAS);
  // The vtable field is derived from the `this` pointer, so they should be in
  // the same address space.  Note that this might not be LLVM address space 0.
  VTableField = Builder.CreateBitCast(VTableField,
                                      VTablePtrTy->getPointerTo(ThisAddrSpace));
  VTableAddressPoint = Builder.CreateBitCast(VTableAddressPoint, VTablePtrTy);

  llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
  TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(VTablePtrTy);
  CGM.DecorateInstructionWithTBAA(Store, TBAAInfo);
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    CGM.DecorateInstructionWithInvariantGroup(Store, Vptr.VTableClass);
}

CodeGenFunction::VPtrsVector
CodeGenFunction::getVTablePointers(const CXXRecordDecl *VTableClass) {
  CodeGenFunction::VPtrsVector VPtrsResult;
  VisitedVirtualBasesSetTy VBases;
  getVTablePointers(BaseSubobject(VTableClass, CharUnits::Zero()),
                    /*NearestVBase=*/nullptr,
                    /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                    /*BaseIsNonVirtualPrimaryBase=*/false, VTableClass, VBases,
                    VPtrsResult);
  return VPtrsResult;
}

void CodeGenFunction::getVTablePointers(BaseSubobject Base,
                                        const CXXRecordDecl *NearestVBase,
                                        CharUnits OffsetFromNearestVBase,
                                        bool BaseIsNonVirtualPrimaryBase,
                                        const CXXRecordDecl *VTableClass,
                                        VisitedVirtualBasesSetTy &VBases,
                                        VPtrsVector &Vptrs) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    VPtr Vptr = {Base, NearestVBase, OffsetFromNearestVBase, VTableClass};
    Vptrs.push_back(Vptr);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (const auto &I : RD->bases()) {
    auto *BaseDecl =
        cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I.isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl).second)
        continue;

      const ASTRecordLayout &Layout =
          getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
          OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    getVTablePointers(
        BaseSubobject(BaseDecl, BaseOffset),
        I.isVirtual() ? BaseDecl : NearestVBase, BaseOffsetFromNearestVBase,
        BaseDeclIsNonVirtualPrimaryBase, VTableClass, VBases, Vptrs);
  }
}
void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Initialize the vtable pointers for this class and all of its bases.
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(RD))
    for (const VPtr &Vptr : getVTablePointers(RD))
      InitializeVTablePointer(Vptr);

  if (RD->getNumVBases())
    CGM.getCXXABI().initializeHiddenVirtualInheritanceMembers(*this, RD);
}

llvm::Value *CodeGenFunction::GetVTablePtr(Address This,
                                           llvm::Type *VTableTy,
                                           const CXXRecordDecl *RD) {
  Address VTablePtrSrc = Builder.CreateElementBitCast(This, VTableTy);
  llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
  TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(VTableTy);
  CGM.DecorateInstructionWithTBAA(VTable, TBAAInfo);

  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    CGM.DecorateInstructionWithInvariantGroup(VTable, RD);

  return VTable;
}

// If a class has a single non-virtual base and does not introduce or override
// virtual member functions or fields, it will have the same layout as its
// base.  This function returns the least derived such class.
//
// Casting an instance of a base class to such a derived class is technically
// undefined behavior, but it is a relatively common hack for introducing
// member functions on class instances with specific properties
// (e.g. llvm::Operator) that works under most compilers and should not have
// security implications, so we allow it by default.  It can be disabled with
// -fsanitize=cfi-cast-strict.
static const CXXRecordDecl *
LeastDerivedClassWithSameLayout(const CXXRecordDecl *RD) {
  if (!RD->field_empty())
    return RD;

  if (RD->getNumVBases() != 0)
    return RD;

  if (RD->getNumBases() != 1)
    return RD;

  for (const CXXMethodDecl *MD : RD->methods()) {
    if (MD->isVirtual()) {
      // Virtual member functions are only ok if they are implicit destructors
      // because the implicit destructor will have the same semantics as the
      // base class's destructor if no fields are added.
      if (isa<CXXDestructorDecl>(MD) && MD->isImplicit())
        continue;
      return RD;
    }
  }

  return LeastDerivedClassWithSameLayout(
      RD->bases_begin()->getType()->getAsCXXRecordDecl());
}
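// For intuition (hypothetical example, not from this file):
//
//   struct Base { virtual void f(); int x; };
//   struct View : Base { int getX() const { return x; } };  // no new state
//
// LeastDerivedClassWithSameLayout(View) returns Base, so a CFI check against
// a View* accepts any object whose vptr matches Base's layout, keeping the
// common "interface view" cast pattern working under -fsanitize=cfi.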
void CodeGenFunction::EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD,
                                                   llvm::Value *VTable,
                                                   SourceLocation Loc) {
  if (SanOpts.has(SanitizerKind::CFIVCall))
    EmitVTablePtrCheckForCall(RD, VTable, CodeGenFunction::CFITCK_VCall, Loc);
  else if (CGM.getCodeGenOpts().WholeProgramVTables &&
           // Don't insert type test assumes if we are forcing public std
           // visibility.
           !CGM.HasLTOVisibilityPublicStd(RD)) {
    llvm::Metadata *MD =
        CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
    llvm::Value *TypeId =
        llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);

    llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
    llvm::Value *TypeTest =
        Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::type_test),
                           {CastedVTable, TypeId});
    Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::assume), TypeTest);
  }
}

void CodeGenFunction::EmitVTablePtrCheckForCall(const CXXRecordDecl *RD,
                                                llvm::Value *VTable,
                                                CFITypeCheckKind TCK,
                                                SourceLocation Loc) {
  if (!SanOpts.has(SanitizerKind::CFICastStrict))
    RD = LeastDerivedClassWithSameLayout(RD);

  EmitVTablePtrCheck(RD, VTable, TCK, Loc);
}

void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T,
                                                llvm::Value *Derived,
                                                bool MayBeNull,
                                                CFITypeCheckKind TCK,
                                                SourceLocation Loc) {
  if (!getLangOpts().CPlusPlus)
    return;

  auto *ClassTy = T->getAs<RecordType>();
  if (!ClassTy)
    return;

  const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(ClassTy->getDecl());

  if (!ClassDecl->isCompleteDefinition() || !ClassDecl->isDynamicClass())
    return;

  if (!SanOpts.has(SanitizerKind::CFICastStrict))
    ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl);

  llvm::BasicBlock *ContBlock = nullptr;

  if (MayBeNull) {
    llvm::Value *DerivedNotNull =
        Builder.CreateIsNotNull(Derived, "cast.nonnull");

    llvm::BasicBlock *CheckBlock = createBasicBlock("cast.check");
    ContBlock = createBasicBlock("cast.cont");

    Builder.CreateCondBr(DerivedNotNull, CheckBlock, ContBlock);

    EmitBlock(CheckBlock);
  }

  llvm::Value *VTable;
  std::tie(VTable, ClassDecl) = CGM.getCXXABI().LoadVTablePtr(
      *this, Address(Derived, getPointerAlign()), ClassDecl);

  EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc);

  if (MayBeNull) {
    Builder.CreateBr(ContBlock);
    EmitBlock(ContBlock);
  }
}
void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD,
                                         llvm::Value *VTable,
                                         CFITypeCheckKind TCK,
                                         SourceLocation Loc) {
  if (!CGM.getCodeGenOpts().SanitizeCfiCrossDso &&
      !CGM.HasHiddenLTOVisibility(RD))
    return;

  SanitizerMask M;
  llvm::SanitizerStatKind SSK;
  switch (TCK) {
  case CFITCK_VCall:
    M = SanitizerKind::CFIVCall;
    SSK = llvm::SanStat_CFI_VCall;
    break;
  case CFITCK_NVCall:
    M = SanitizerKind::CFINVCall;
    SSK = llvm::SanStat_CFI_NVCall;
    break;
  case CFITCK_DerivedCast:
    M = SanitizerKind::CFIDerivedCast;
    SSK = llvm::SanStat_CFI_DerivedCast;
    break;
  case CFITCK_UnrelatedCast:
    M = SanitizerKind::CFIUnrelatedCast;
    SSK = llvm::SanStat_CFI_UnrelatedCast;
    break;
  case CFITCK_ICall:
  case CFITCK_NVMFCall:
  case CFITCK_VMFCall:
    llvm_unreachable("unexpected sanitizer kind");
  }

  std::string TypeName = RD->getQualifiedNameAsString();
  if (getContext().getNoSanitizeList().containsType(M, TypeName))
    return;

  SanitizerScope SanScope(this);
  EmitSanitizerStatReport(SSK);

  llvm::Metadata *MD =
      CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
  llvm::Value *TypeId = llvm::MetadataAsValue::get(getLLVMContext(), MD);

  llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
  llvm::Value *TypeTest = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, TypeId});

  llvm::Constant *StaticData[] = {
      llvm::ConstantInt::get(Int8Ty, TCK),
      EmitCheckSourceLocation(Loc),
      EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)),
  };

  auto CrossDsoTypeId = CGM.CreateCrossDsoCfiTypeId(MD);
  if (CGM.getCodeGenOpts().SanitizeCfiCrossDso && CrossDsoTypeId) {
    EmitCfiSlowPathCheck(M, TypeTest, CrossDsoTypeId, CastedVTable, StaticData);
    return;
  }

  if (CGM.getCodeGenOpts().SanitizeTrap.has(M)) {
    EmitTrapCheck(TypeTest, SanitizerHandler::CFICheckFail);
    return;
  }

  llvm::Value *AllVtables = llvm::MetadataAsValue::get(
      CGM.getLLVMContext(),
      llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));
  llvm::Value *ValidVtable = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, AllVtables});
  EmitCheck(std::make_pair(TypeTest, M), SanitizerHandler::CFICheckFail,
            StaticData, {CastedVTable, ValidVtable});
}

bool CodeGenFunction::ShouldEmitVTableTypeCheckedLoad(const CXXRecordDecl *RD) {
  if (!CGM.getCodeGenOpts().WholeProgramVTables ||
      !CGM.HasHiddenLTOVisibility(RD))
    return false;

  if (CGM.getCodeGenOpts().VirtualFunctionElimination)
    return true;

  if (!SanOpts.has(SanitizerKind::CFIVCall) ||
      !CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIVCall))
    return false;

  std::string TypeName = RD->getQualifiedNameAsString();
  return !getContext().getNoSanitizeList().containsType(
      SanitizerKind::CFIVCall, TypeName);
}

llvm::Value *CodeGenFunction::EmitVTableTypeCheckedLoad(
    const CXXRecordDecl *RD, llvm::Value *VTable, uint64_t VTableByteOffset) {
  SanitizerScope SanScope(this);

  EmitSanitizerStatReport(llvm::SanStat_CFI_VCall);

  llvm::Metadata *MD =
      CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
  llvm::Value *TypeId = llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);

  llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
  llvm::Value *CheckedLoad = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::type_checked_load),
      {CastedVTable, llvm::ConstantInt::get(Int32Ty, VTableByteOffset),
       TypeId});
  llvm::Value *CheckResult = Builder.CreateExtractValue(CheckedLoad, 1);

  std::string TypeName = RD->getQualifiedNameAsString();
  if (SanOpts.has(SanitizerKind::CFIVCall) &&
      !getContext().getNoSanitizeList().containsType(SanitizerKind::CFIVCall,
                                                     TypeName)) {
    EmitCheck(std::make_pair(CheckResult, SanitizerKind::CFIVCall),
              SanitizerHandler::CFICheckFail, {}, {});
  }

  return Builder.CreateBitCast(
      Builder.CreateExtractValue(CheckedLoad, 0),
      cast<llvm::PointerType>(VTable->getType())->getElementType());
}
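// The forwarding helpers below implement conversions like (hypothetical
// example, not from this file):
//
//   auto l = [](int x) { return x + 1; };
//   int (*fp)(int) = l;   // fp points at a "static invoker" thunk
//
// The static invoker (and, for ObjC, the block invoke function) simply
// forwards its arguments to the lambda's operator().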
void CodeGenFunction::EmitForwardingCallToLambda(
    const CXXMethodDecl *callOperator, CallArgList &callArgs) {
  // Get the address of the call operator.
  const CGFunctionInfo &calleeFnInfo =
    CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);
  llvm::Constant *calleePtr =
    CGM.GetAddrOfFunction(GlobalDecl(callOperator),
                          CGM.getTypes().GetFunctionType(calleeFnInfo));

  // Prepare the return slot.
  const FunctionProtoType *FPT =
    callOperator->getType()->castAs<FunctionProtoType>();
  QualType resultType = FPT->getReturnType();
  ReturnValueSlot returnSlot;
  if (!resultType->isVoidType() &&
      calleeFnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      !hasScalarEvaluationKind(calleeFnInfo.getReturnType()))
    returnSlot =
        ReturnValueSlot(ReturnValue, resultType.isVolatileQualified(),
                        /*IsUnused=*/false, /*IsExternallyDestructed=*/true);

  // We don't need to separately arrange the call arguments because
  // the call can't be variadic anyway --- it's impossible to forward
  // variadic arguments.

  // Now emit our call.
  auto callee = CGCallee::forDirect(calleePtr, GlobalDecl(callOperator));
  RValue RV = EmitCall(calleeFnInfo, callee, returnSlot, callArgs);

  // If necessary, copy the returned value into the slot.
  if (!resultType->isVoidType() && returnSlot.isNull()) {
    if (getLangOpts().ObjCAutoRefCount && resultType->isObjCRetainableType()) {
      RV = RValue::get(EmitARCRetainAutoreleasedReturnValue(RV.getScalarVal()));
    }
    EmitReturnOfRValue(RV, resultType);
  } else
    EmitBranchThroughCleanup(ReturnBlock);
}

void CodeGenFunction::EmitLambdaBlockInvokeBody() {
  const BlockDecl *BD = BlockInfo->getBlockDecl();
  const VarDecl *variable = BD->capture_begin()->getVariable();
  const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();
  const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();

  if (CallOp->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(CurCodeDecl, "lambda conversion to variadic function");
    return;
  }

  // Start building arguments for forwarding call
  CallArgList CallArgs;

  QualType ThisType =
      getContext().getPointerType(getContext().getRecordType(Lambda));
  Address ThisPtr = GetAddrOfBlockDecl(variable);
  CallArgs.add(RValue::get(ThisPtr.getPointer()), ThisType);

  // Add the rest of the parameters.
  for (auto param : BD->parameters())
    EmitDelegateCallArg(CallArgs, param, param->getBeginLoc());

  assert(!Lambda->isGenericLambda() &&
         "generic lambda interconversion to block not implemented");
  EmitForwardingCallToLambda(CallOp, CallArgs);
}

void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
  const CXXRecordDecl *Lambda = MD->getParent();

  // Start building arguments for forwarding call
  CallArgList CallArgs;

  QualType ThisType =
      getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr =
      llvm::UndefValue::get(getTypes().ConvertType(ThisType));
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (auto Param : MD->parameters())
    EmitDelegateCallArg(CallArgs, Param, Param->getBeginLoc());

  const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();
  // For a generic lambda, find the corresponding call operator specialization
  // to which the call to the static-invoker shall be forwarded.
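  // For instance (hypothetical example, not from this file):
  //
  //   auto l = [](auto x) { return x; };
  //   int (*fp)(int) = l;   // forwards to the operator()<int> specialization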
  if (Lambda->isGenericLambda()) {
    assert(MD->isFunctionTemplateSpecialization());
    const TemplateArgumentList *TAL = MD->getTemplateSpecializationArgs();
    FunctionTemplateDecl *CallOpTemplate =
        CallOp->getDescribedFunctionTemplate();
    void *InsertPos = nullptr;
    FunctionDecl *CorrespondingCallOpSpecialization =
        CallOpTemplate->findSpecialization(TAL->asArray(), InsertPos);
    assert(CorrespondingCallOpSpecialization);
    CallOp = cast<CXXMethodDecl>(CorrespondingCallOpSpecialization);
  }
  EmitForwardingCallToLambda(CallOp, CallArgs);
}

void CodeGenFunction::EmitLambdaStaticInvokeBody(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
    return;
  }

  EmitLambdaDelegatingInvokeBody(MD);
}