17330f729Sjoerg //===--- CGExprAgg.cpp - Emit LLVM Code from Aggregate Expressions --------===//
27330f729Sjoerg //
37330f729Sjoerg // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
47330f729Sjoerg // See https://llvm.org/LICENSE.txt for license information.
57330f729Sjoerg // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
67330f729Sjoerg //
77330f729Sjoerg //===----------------------------------------------------------------------===//
87330f729Sjoerg //
97330f729Sjoerg // This contains code to emit Aggregate Expr nodes as LLVM code.
107330f729Sjoerg //
117330f729Sjoerg //===----------------------------------------------------------------------===//
127330f729Sjoerg
137330f729Sjoerg #include "CGCXXABI.h"
147330f729Sjoerg #include "CGObjCRuntime.h"
15*e038c9c4Sjoerg #include "CodeGenFunction.h"
167330f729Sjoerg #include "CodeGenModule.h"
177330f729Sjoerg #include "ConstantEmitter.h"
18*e038c9c4Sjoerg #include "TargetInfo.h"
197330f729Sjoerg #include "clang/AST/ASTContext.h"
20*e038c9c4Sjoerg #include "clang/AST/Attr.h"
217330f729Sjoerg #include "clang/AST/DeclCXX.h"
227330f729Sjoerg #include "clang/AST/DeclTemplate.h"
237330f729Sjoerg #include "clang/AST/StmtVisitor.h"
247330f729Sjoerg #include "llvm/IR/Constants.h"
257330f729Sjoerg #include "llvm/IR/Function.h"
267330f729Sjoerg #include "llvm/IR/GlobalVariable.h"
277330f729Sjoerg #include "llvm/IR/IntrinsicInst.h"
28*e038c9c4Sjoerg #include "llvm/IR/Intrinsics.h"
297330f729Sjoerg using namespace clang;
307330f729Sjoerg using namespace CodeGen;
317330f729Sjoerg
327330f729Sjoerg //===----------------------------------------------------------------------===//
337330f729Sjoerg // Aggregate Expression Emitter
347330f729Sjoerg //===----------------------------------------------------------------------===//
357330f729Sjoerg
367330f729Sjoerg namespace {
377330f729Sjoerg class AggExprEmitter : public StmtVisitor<AggExprEmitter> {
387330f729Sjoerg CodeGenFunction &CGF;
397330f729Sjoerg CGBuilderTy &Builder;
407330f729Sjoerg AggValueSlot Dest;
417330f729Sjoerg bool IsResultUnused;
427330f729Sjoerg
EnsureSlot(QualType T)437330f729Sjoerg AggValueSlot EnsureSlot(QualType T) {
447330f729Sjoerg if (!Dest.isIgnored()) return Dest;
457330f729Sjoerg return CGF.CreateAggTemp(T, "agg.tmp.ensured");
467330f729Sjoerg }
EnsureDest(QualType T)477330f729Sjoerg void EnsureDest(QualType T) {
487330f729Sjoerg if (!Dest.isIgnored()) return;
497330f729Sjoerg Dest = CGF.CreateAggTemp(T, "agg.tmp.ensured");
507330f729Sjoerg }
517330f729Sjoerg
527330f729Sjoerg // Calls `Fn` with a valid return value slot, potentially creating a temporary
537330f729Sjoerg // to do so. If a temporary is created, an appropriate copy into `Dest` will
547330f729Sjoerg // be emitted, as will lifetime markers.
557330f729Sjoerg //
567330f729Sjoerg // The given function should take a ReturnValueSlot, and return an RValue that
577330f729Sjoerg // points to said slot.
587330f729Sjoerg void withReturnValueSlot(const Expr *E,
597330f729Sjoerg llvm::function_ref<RValue(ReturnValueSlot)> Fn);
607330f729Sjoerg
617330f729Sjoerg public:
AggExprEmitter(CodeGenFunction & cgf,AggValueSlot Dest,bool IsResultUnused)627330f729Sjoerg AggExprEmitter(CodeGenFunction &cgf, AggValueSlot Dest, bool IsResultUnused)
637330f729Sjoerg : CGF(cgf), Builder(CGF.Builder), Dest(Dest),
647330f729Sjoerg IsResultUnused(IsResultUnused) { }
657330f729Sjoerg
667330f729Sjoerg //===--------------------------------------------------------------------===//
677330f729Sjoerg // Utilities
687330f729Sjoerg //===--------------------------------------------------------------------===//
697330f729Sjoerg
707330f729Sjoerg /// EmitAggLoadOfLValue - Given an expression with aggregate type that
717330f729Sjoerg /// represents a value lvalue, this method emits the address of the lvalue,
727330f729Sjoerg /// then loads the result into DestPtr.
737330f729Sjoerg void EmitAggLoadOfLValue(const Expr *E);
747330f729Sjoerg
757330f729Sjoerg enum ExprValueKind {
767330f729Sjoerg EVK_RValue,
777330f729Sjoerg EVK_NonRValue
787330f729Sjoerg };
797330f729Sjoerg
807330f729Sjoerg /// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
817330f729Sjoerg /// SrcIsRValue is true if source comes from an RValue.
827330f729Sjoerg void EmitFinalDestCopy(QualType type, const LValue &src,
837330f729Sjoerg ExprValueKind SrcValueKind = EVK_NonRValue);
847330f729Sjoerg void EmitFinalDestCopy(QualType type, RValue src);
857330f729Sjoerg void EmitCopy(QualType type, const AggValueSlot &dest,
867330f729Sjoerg const AggValueSlot &src);
877330f729Sjoerg
887330f729Sjoerg void EmitMoveFromReturnSlot(const Expr *E, RValue Src);
897330f729Sjoerg
907330f729Sjoerg void EmitArrayInit(Address DestPtr, llvm::ArrayType *AType,
917330f729Sjoerg QualType ArrayQTy, InitListExpr *E);
927330f729Sjoerg
needsGC(QualType T)937330f729Sjoerg AggValueSlot::NeedsGCBarriers_t needsGC(QualType T) {
947330f729Sjoerg if (CGF.getLangOpts().getGC() && TypeRequiresGCollection(T))
957330f729Sjoerg return AggValueSlot::NeedsGCBarriers;
967330f729Sjoerg return AggValueSlot::DoesNotNeedGCBarriers;
977330f729Sjoerg }
987330f729Sjoerg
997330f729Sjoerg bool TypeRequiresGCollection(QualType T);
1007330f729Sjoerg
1017330f729Sjoerg //===--------------------------------------------------------------------===//
1027330f729Sjoerg // Visitor Methods
1037330f729Sjoerg //===--------------------------------------------------------------------===//
1047330f729Sjoerg
Visit(Expr * E)1057330f729Sjoerg void Visit(Expr *E) {
1067330f729Sjoerg ApplyDebugLocation DL(CGF, E);
1077330f729Sjoerg StmtVisitor<AggExprEmitter>::Visit(E);
1087330f729Sjoerg }
1097330f729Sjoerg
VisitStmt(Stmt * S)1107330f729Sjoerg void VisitStmt(Stmt *S) {
1117330f729Sjoerg CGF.ErrorUnsupported(S, "aggregate expression");
1127330f729Sjoerg }
VisitParenExpr(ParenExpr * PE)1137330f729Sjoerg void VisitParenExpr(ParenExpr *PE) { Visit(PE->getSubExpr()); }
VisitGenericSelectionExpr(GenericSelectionExpr * GE)1147330f729Sjoerg void VisitGenericSelectionExpr(GenericSelectionExpr *GE) {
1157330f729Sjoerg Visit(GE->getResultExpr());
1167330f729Sjoerg }
VisitCoawaitExpr(CoawaitExpr * E)1177330f729Sjoerg void VisitCoawaitExpr(CoawaitExpr *E) {
1187330f729Sjoerg CGF.EmitCoawaitExpr(*E, Dest, IsResultUnused);
1197330f729Sjoerg }
VisitCoyieldExpr(CoyieldExpr * E)1207330f729Sjoerg void VisitCoyieldExpr(CoyieldExpr *E) {
1217330f729Sjoerg CGF.EmitCoyieldExpr(*E, Dest, IsResultUnused);
1227330f729Sjoerg }
VisitUnaryCoawait(UnaryOperator * E)1237330f729Sjoerg void VisitUnaryCoawait(UnaryOperator *E) { Visit(E->getSubExpr()); }
VisitUnaryExtension(UnaryOperator * E)1247330f729Sjoerg void VisitUnaryExtension(UnaryOperator *E) { Visit(E->getSubExpr()); }
VisitSubstNonTypeTemplateParmExpr(SubstNonTypeTemplateParmExpr * E)1257330f729Sjoerg void VisitSubstNonTypeTemplateParmExpr(SubstNonTypeTemplateParmExpr *E) {
1267330f729Sjoerg return Visit(E->getReplacement());
1277330f729Sjoerg }
1287330f729Sjoerg
VisitConstantExpr(ConstantExpr * E)1297330f729Sjoerg void VisitConstantExpr(ConstantExpr *E) {
130*e038c9c4Sjoerg if (llvm::Value *Result = ConstantEmitter(CGF).tryEmitConstantExpr(E)) {
131*e038c9c4Sjoerg CGF.EmitAggregateStore(Result, Dest.getAddress(),
132*e038c9c4Sjoerg E->getType().isVolatileQualified());
133*e038c9c4Sjoerg return;
134*e038c9c4Sjoerg }
1357330f729Sjoerg return Visit(E->getSubExpr());
1367330f729Sjoerg }
1377330f729Sjoerg
1387330f729Sjoerg // l-values.
VisitDeclRefExpr(DeclRefExpr * E)1397330f729Sjoerg void VisitDeclRefExpr(DeclRefExpr *E) { EmitAggLoadOfLValue(E); }
VisitMemberExpr(MemberExpr * ME)1407330f729Sjoerg void VisitMemberExpr(MemberExpr *ME) { EmitAggLoadOfLValue(ME); }
VisitUnaryDeref(UnaryOperator * E)1417330f729Sjoerg void VisitUnaryDeref(UnaryOperator *E) { EmitAggLoadOfLValue(E); }
VisitStringLiteral(StringLiteral * E)1427330f729Sjoerg void VisitStringLiteral(StringLiteral *E) { EmitAggLoadOfLValue(E); }
1437330f729Sjoerg void VisitCompoundLiteralExpr(CompoundLiteralExpr *E);
VisitArraySubscriptExpr(ArraySubscriptExpr * E)1447330f729Sjoerg void VisitArraySubscriptExpr(ArraySubscriptExpr *E) {
1457330f729Sjoerg EmitAggLoadOfLValue(E);
1467330f729Sjoerg }
VisitPredefinedExpr(const PredefinedExpr * E)1477330f729Sjoerg void VisitPredefinedExpr(const PredefinedExpr *E) {
1487330f729Sjoerg EmitAggLoadOfLValue(E);
1497330f729Sjoerg }
1507330f729Sjoerg
1517330f729Sjoerg // Operators.
1527330f729Sjoerg void VisitCastExpr(CastExpr *E);
1537330f729Sjoerg void VisitCallExpr(const CallExpr *E);
1547330f729Sjoerg void VisitStmtExpr(const StmtExpr *E);
1557330f729Sjoerg void VisitBinaryOperator(const BinaryOperator *BO);
1567330f729Sjoerg void VisitPointerToDataMemberBinaryOperator(const BinaryOperator *BO);
1577330f729Sjoerg void VisitBinAssign(const BinaryOperator *E);
1587330f729Sjoerg void VisitBinComma(const BinaryOperator *E);
1597330f729Sjoerg void VisitBinCmp(const BinaryOperator *E);
VisitCXXRewrittenBinaryOperator(CXXRewrittenBinaryOperator * E)1607330f729Sjoerg void VisitCXXRewrittenBinaryOperator(CXXRewrittenBinaryOperator *E) {
1617330f729Sjoerg Visit(E->getSemanticForm());
1627330f729Sjoerg }
1637330f729Sjoerg
1647330f729Sjoerg void VisitObjCMessageExpr(ObjCMessageExpr *E);
VisitObjCIvarRefExpr(ObjCIvarRefExpr * E)1657330f729Sjoerg void VisitObjCIvarRefExpr(ObjCIvarRefExpr *E) {
1667330f729Sjoerg EmitAggLoadOfLValue(E);
1677330f729Sjoerg }
1687330f729Sjoerg
1697330f729Sjoerg void VisitDesignatedInitUpdateExpr(DesignatedInitUpdateExpr *E);
1707330f729Sjoerg void VisitAbstractConditionalOperator(const AbstractConditionalOperator *CO);
1717330f729Sjoerg void VisitChooseExpr(const ChooseExpr *CE);
1727330f729Sjoerg void VisitInitListExpr(InitListExpr *E);
1737330f729Sjoerg void VisitArrayInitLoopExpr(const ArrayInitLoopExpr *E,
1747330f729Sjoerg llvm::Value *outerBegin = nullptr);
1757330f729Sjoerg void VisitImplicitValueInitExpr(ImplicitValueInitExpr *E);
VisitNoInitExpr(NoInitExpr * E)1767330f729Sjoerg void VisitNoInitExpr(NoInitExpr *E) { } // Do nothing.
VisitCXXDefaultArgExpr(CXXDefaultArgExpr * DAE)1777330f729Sjoerg void VisitCXXDefaultArgExpr(CXXDefaultArgExpr *DAE) {
1787330f729Sjoerg CodeGenFunction::CXXDefaultArgExprScope Scope(CGF, DAE);
1797330f729Sjoerg Visit(DAE->getExpr());
1807330f729Sjoerg }
VisitCXXDefaultInitExpr(CXXDefaultInitExpr * DIE)1817330f729Sjoerg void VisitCXXDefaultInitExpr(CXXDefaultInitExpr *DIE) {
1827330f729Sjoerg CodeGenFunction::CXXDefaultInitExprScope Scope(CGF, DIE);
1837330f729Sjoerg Visit(DIE->getExpr());
1847330f729Sjoerg }
1857330f729Sjoerg void VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E);
1867330f729Sjoerg void VisitCXXConstructExpr(const CXXConstructExpr *E);
1877330f729Sjoerg void VisitCXXInheritedCtorInitExpr(const CXXInheritedCtorInitExpr *E);
1887330f729Sjoerg void VisitLambdaExpr(LambdaExpr *E);
1897330f729Sjoerg void VisitCXXStdInitializerListExpr(CXXStdInitializerListExpr *E);
1907330f729Sjoerg void VisitExprWithCleanups(ExprWithCleanups *E);
1917330f729Sjoerg void VisitCXXScalarValueInitExpr(CXXScalarValueInitExpr *E);
VisitCXXTypeidExpr(CXXTypeidExpr * E)1927330f729Sjoerg void VisitCXXTypeidExpr(CXXTypeidExpr *E) { EmitAggLoadOfLValue(E); }
1937330f729Sjoerg void VisitMaterializeTemporaryExpr(MaterializeTemporaryExpr *E);
1947330f729Sjoerg void VisitOpaqueValueExpr(OpaqueValueExpr *E);
1957330f729Sjoerg
VisitPseudoObjectExpr(PseudoObjectExpr * E)1967330f729Sjoerg void VisitPseudoObjectExpr(PseudoObjectExpr *E) {
1977330f729Sjoerg if (E->isGLValue()) {
1987330f729Sjoerg LValue LV = CGF.EmitPseudoObjectLValue(E);
1997330f729Sjoerg return EmitFinalDestCopy(E->getType(), LV);
2007330f729Sjoerg }
2017330f729Sjoerg
2027330f729Sjoerg CGF.EmitPseudoObjectRValue(E, EnsureSlot(E->getType()));
2037330f729Sjoerg }
2047330f729Sjoerg
2057330f729Sjoerg void VisitVAArgExpr(VAArgExpr *E);
2067330f729Sjoerg
2077330f729Sjoerg void EmitInitializationToLValue(Expr *E, LValue Address);
2087330f729Sjoerg void EmitNullInitializationToLValue(LValue Address);
2097330f729Sjoerg // case Expr::ChooseExprClass:
VisitCXXThrowExpr(const CXXThrowExpr * E)2107330f729Sjoerg void VisitCXXThrowExpr(const CXXThrowExpr *E) { CGF.EmitCXXThrowExpr(E); }
VisitAtomicExpr(AtomicExpr * E)2117330f729Sjoerg void VisitAtomicExpr(AtomicExpr *E) {
2127330f729Sjoerg RValue Res = CGF.EmitAtomicExpr(E);
2137330f729Sjoerg EmitFinalDestCopy(E->getType(), Res);
2147330f729Sjoerg }
2157330f729Sjoerg };
2167330f729Sjoerg } // end anonymous namespace.
2177330f729Sjoerg
//===----------------------------------------------------------------------===//
//                                Utilities
//===----------------------------------------------------------------------===//
2217330f729Sjoerg
2227330f729Sjoerg /// EmitAggLoadOfLValue - Given an expression with aggregate type that
2237330f729Sjoerg /// represents a value lvalue, this method emits the address of the lvalue,
2247330f729Sjoerg /// then loads the result into DestPtr.
EmitAggLoadOfLValue(const Expr * E)2257330f729Sjoerg void AggExprEmitter::EmitAggLoadOfLValue(const Expr *E) {
2267330f729Sjoerg LValue LV = CGF.EmitLValue(E);
2277330f729Sjoerg
2287330f729Sjoerg // If the type of the l-value is atomic, then do an atomic load.
2297330f729Sjoerg if (LV.getType()->isAtomicType() || CGF.LValueIsSuitableForInlineAtomic(LV)) {
2307330f729Sjoerg CGF.EmitAtomicLoad(LV, E->getExprLoc(), Dest);
2317330f729Sjoerg return;
2327330f729Sjoerg }
2337330f729Sjoerg
2347330f729Sjoerg EmitFinalDestCopy(E->getType(), LV);
2357330f729Sjoerg }
2367330f729Sjoerg
2377330f729Sjoerg /// True if the given aggregate type requires special GC API calls.
TypeRequiresGCollection(QualType T)2387330f729Sjoerg bool AggExprEmitter::TypeRequiresGCollection(QualType T) {
2397330f729Sjoerg // Only record types have members that might require garbage collection.
2407330f729Sjoerg const RecordType *RecordTy = T->getAs<RecordType>();
2417330f729Sjoerg if (!RecordTy) return false;
2427330f729Sjoerg
2437330f729Sjoerg // Don't mess with non-trivial C++ types.
2447330f729Sjoerg RecordDecl *Record = RecordTy->getDecl();
2457330f729Sjoerg if (isa<CXXRecordDecl>(Record) &&
2467330f729Sjoerg (cast<CXXRecordDecl>(Record)->hasNonTrivialCopyConstructor() ||
2477330f729Sjoerg !cast<CXXRecordDecl>(Record)->hasTrivialDestructor()))
2487330f729Sjoerg return false;
2497330f729Sjoerg
2507330f729Sjoerg // Check whether the type has an object member.
2517330f729Sjoerg return Record->hasObjectMember();
2527330f729Sjoerg }
2537330f729Sjoerg
withReturnValueSlot(const Expr * E,llvm::function_ref<RValue (ReturnValueSlot)> EmitCall)2547330f729Sjoerg void AggExprEmitter::withReturnValueSlot(
2557330f729Sjoerg const Expr *E, llvm::function_ref<RValue(ReturnValueSlot)> EmitCall) {
2567330f729Sjoerg QualType RetTy = E->getType();
2577330f729Sjoerg bool RequiresDestruction =
258*e038c9c4Sjoerg !Dest.isExternallyDestructed() &&
2597330f729Sjoerg RetTy.isDestructedType() == QualType::DK_nontrivial_c_struct;
2607330f729Sjoerg
2617330f729Sjoerg // If it makes no observable difference, save a memcpy + temporary.
2627330f729Sjoerg //
2637330f729Sjoerg // We need to always provide our own temporary if destruction is required.
2647330f729Sjoerg // Otherwise, EmitCall will emit its own, notice that it's "unused", and end
2657330f729Sjoerg // its lifetime before we have the chance to emit a proper destructor call.
2667330f729Sjoerg bool UseTemp = Dest.isPotentiallyAliased() || Dest.requiresGCollection() ||
2677330f729Sjoerg (RequiresDestruction && !Dest.getAddress().isValid());
2687330f729Sjoerg
2697330f729Sjoerg Address RetAddr = Address::invalid();
2707330f729Sjoerg Address RetAllocaAddr = Address::invalid();
2717330f729Sjoerg
2727330f729Sjoerg EHScopeStack::stable_iterator LifetimeEndBlock;
2737330f729Sjoerg llvm::Value *LifetimeSizePtr = nullptr;
2747330f729Sjoerg llvm::IntrinsicInst *LifetimeStartInst = nullptr;
2757330f729Sjoerg if (!UseTemp) {
2767330f729Sjoerg RetAddr = Dest.getAddress();
2777330f729Sjoerg } else {
2787330f729Sjoerg RetAddr = CGF.CreateMemTemp(RetTy, "tmp", &RetAllocaAddr);
2797330f729Sjoerg uint64_t Size =
2807330f729Sjoerg CGF.CGM.getDataLayout().getTypeAllocSize(CGF.ConvertTypeForMem(RetTy));
2817330f729Sjoerg LifetimeSizePtr = CGF.EmitLifetimeStart(Size, RetAllocaAddr.getPointer());
2827330f729Sjoerg if (LifetimeSizePtr) {
2837330f729Sjoerg LifetimeStartInst =
2847330f729Sjoerg cast<llvm::IntrinsicInst>(std::prev(Builder.GetInsertPoint()));
2857330f729Sjoerg assert(LifetimeStartInst->getIntrinsicID() ==
2867330f729Sjoerg llvm::Intrinsic::lifetime_start &&
2877330f729Sjoerg "Last insertion wasn't a lifetime.start?");
2887330f729Sjoerg
2897330f729Sjoerg CGF.pushFullExprCleanup<CodeGenFunction::CallLifetimeEnd>(
2907330f729Sjoerg NormalEHLifetimeMarker, RetAllocaAddr, LifetimeSizePtr);
2917330f729Sjoerg LifetimeEndBlock = CGF.EHStack.stable_begin();
2927330f729Sjoerg }
2937330f729Sjoerg }
2947330f729Sjoerg
2957330f729Sjoerg RValue Src =
296*e038c9c4Sjoerg EmitCall(ReturnValueSlot(RetAddr, Dest.isVolatile(), IsResultUnused,
297*e038c9c4Sjoerg Dest.isExternallyDestructed()));
2987330f729Sjoerg
2997330f729Sjoerg if (!UseTemp)
3007330f729Sjoerg return;
3017330f729Sjoerg
3027330f729Sjoerg assert(Dest.getPointer() != Src.getAggregatePointer());
3037330f729Sjoerg EmitFinalDestCopy(E->getType(), Src);
3047330f729Sjoerg
3057330f729Sjoerg if (!RequiresDestruction && LifetimeStartInst) {
3067330f729Sjoerg // If there's no dtor to run, the copy was the last use of our temporary.
3077330f729Sjoerg // Since we're not guaranteed to be in an ExprWithCleanups, clean up
3087330f729Sjoerg // eagerly.
3097330f729Sjoerg CGF.DeactivateCleanupBlock(LifetimeEndBlock, LifetimeStartInst);
3107330f729Sjoerg CGF.EmitLifetimeEnd(LifetimeSizePtr, RetAllocaAddr.getPointer());
3117330f729Sjoerg }
3127330f729Sjoerg }
3137330f729Sjoerg
3147330f729Sjoerg /// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
EmitFinalDestCopy(QualType type,RValue src)3157330f729Sjoerg void AggExprEmitter::EmitFinalDestCopy(QualType type, RValue src) {
3167330f729Sjoerg assert(src.isAggregate() && "value must be aggregate value!");
3177330f729Sjoerg LValue srcLV = CGF.MakeAddrLValue(src.getAggregateAddress(), type);
3187330f729Sjoerg EmitFinalDestCopy(type, srcLV, EVK_RValue);
3197330f729Sjoerg }
3207330f729Sjoerg
3217330f729Sjoerg /// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
EmitFinalDestCopy(QualType type,const LValue & src,ExprValueKind SrcValueKind)3227330f729Sjoerg void AggExprEmitter::EmitFinalDestCopy(QualType type, const LValue &src,
3237330f729Sjoerg ExprValueKind SrcValueKind) {
3247330f729Sjoerg // If Dest is ignored, then we're evaluating an aggregate expression
3257330f729Sjoerg // in a context that doesn't care about the result. Note that loads
3267330f729Sjoerg // from volatile l-values force the existence of a non-ignored
3277330f729Sjoerg // destination.
3287330f729Sjoerg if (Dest.isIgnored())
3297330f729Sjoerg return;
3307330f729Sjoerg
3317330f729Sjoerg // Copy non-trivial C structs here.
3327330f729Sjoerg LValue DstLV = CGF.MakeAddrLValue(
3337330f729Sjoerg Dest.getAddress(), Dest.isVolatile() ? type.withVolatile() : type);
3347330f729Sjoerg
3357330f729Sjoerg if (SrcValueKind == EVK_RValue) {
3367330f729Sjoerg if (type.isNonTrivialToPrimitiveDestructiveMove() == QualType::PCK_Struct) {
3377330f729Sjoerg if (Dest.isPotentiallyAliased())
3387330f729Sjoerg CGF.callCStructMoveAssignmentOperator(DstLV, src);
3397330f729Sjoerg else
3407330f729Sjoerg CGF.callCStructMoveConstructor(DstLV, src);
3417330f729Sjoerg return;
3427330f729Sjoerg }
3437330f729Sjoerg } else {
3447330f729Sjoerg if (type.isNonTrivialToPrimitiveCopy() == QualType::PCK_Struct) {
3457330f729Sjoerg if (Dest.isPotentiallyAliased())
3467330f729Sjoerg CGF.callCStructCopyAssignmentOperator(DstLV, src);
3477330f729Sjoerg else
3487330f729Sjoerg CGF.callCStructCopyConstructor(DstLV, src);
3497330f729Sjoerg return;
3507330f729Sjoerg }
3517330f729Sjoerg }
3527330f729Sjoerg
353*e038c9c4Sjoerg AggValueSlot srcAgg = AggValueSlot::forLValue(
354*e038c9c4Sjoerg src, CGF, AggValueSlot::IsDestructed, needsGC(type),
355*e038c9c4Sjoerg AggValueSlot::IsAliased, AggValueSlot::MayOverlap);
3567330f729Sjoerg EmitCopy(type, Dest, srcAgg);
3577330f729Sjoerg }
3587330f729Sjoerg
3597330f729Sjoerg /// Perform a copy from the source into the destination.
3607330f729Sjoerg ///
3617330f729Sjoerg /// \param type - the type of the aggregate being copied; qualifiers are
3627330f729Sjoerg /// ignored
EmitCopy(QualType type,const AggValueSlot & dest,const AggValueSlot & src)3637330f729Sjoerg void AggExprEmitter::EmitCopy(QualType type, const AggValueSlot &dest,
3647330f729Sjoerg const AggValueSlot &src) {
3657330f729Sjoerg if (dest.requiresGCollection()) {
3667330f729Sjoerg CharUnits sz = dest.getPreferredSize(CGF.getContext(), type);
3677330f729Sjoerg llvm::Value *size = llvm::ConstantInt::get(CGF.SizeTy, sz.getQuantity());
3687330f729Sjoerg CGF.CGM.getObjCRuntime().EmitGCMemmoveCollectable(CGF,
3697330f729Sjoerg dest.getAddress(),
3707330f729Sjoerg src.getAddress(),
3717330f729Sjoerg size);
3727330f729Sjoerg return;
3737330f729Sjoerg }
3747330f729Sjoerg
3757330f729Sjoerg // If the result of the assignment is used, copy the LHS there also.
3767330f729Sjoerg // It's volatile if either side is. Use the minimum alignment of
3777330f729Sjoerg // the two sides.
3787330f729Sjoerg LValue DestLV = CGF.MakeAddrLValue(dest.getAddress(), type);
3797330f729Sjoerg LValue SrcLV = CGF.MakeAddrLValue(src.getAddress(), type);
3807330f729Sjoerg CGF.EmitAggregateCopy(DestLV, SrcLV, type, dest.mayOverlap(),
3817330f729Sjoerg dest.isVolatile() || src.isVolatile());
3827330f729Sjoerg }
3837330f729Sjoerg
3847330f729Sjoerg /// Emit the initializer for a std::initializer_list initialized with a
3857330f729Sjoerg /// real initializer list.
3867330f729Sjoerg void
VisitCXXStdInitializerListExpr(CXXStdInitializerListExpr * E)3877330f729Sjoerg AggExprEmitter::VisitCXXStdInitializerListExpr(CXXStdInitializerListExpr *E) {
3887330f729Sjoerg // Emit an array containing the elements. The array is externally destructed
3897330f729Sjoerg // if the std::initializer_list object is.
3907330f729Sjoerg ASTContext &Ctx = CGF.getContext();
3917330f729Sjoerg LValue Array = CGF.EmitLValue(E->getSubExpr());
3927330f729Sjoerg assert(Array.isSimple() && "initializer_list array not a simple lvalue");
393*e038c9c4Sjoerg Address ArrayPtr = Array.getAddress(CGF);
3947330f729Sjoerg
3957330f729Sjoerg const ConstantArrayType *ArrayType =
3967330f729Sjoerg Ctx.getAsConstantArrayType(E->getSubExpr()->getType());
3977330f729Sjoerg assert(ArrayType && "std::initializer_list constructed from non-array");
3987330f729Sjoerg
3997330f729Sjoerg // FIXME: Perform the checks on the field types in SemaInit.
4007330f729Sjoerg RecordDecl *Record = E->getType()->castAs<RecordType>()->getDecl();
4017330f729Sjoerg RecordDecl::field_iterator Field = Record->field_begin();
4027330f729Sjoerg if (Field == Record->field_end()) {
4037330f729Sjoerg CGF.ErrorUnsupported(E, "weird std::initializer_list");
4047330f729Sjoerg return;
4057330f729Sjoerg }
4067330f729Sjoerg
4077330f729Sjoerg // Start pointer.
4087330f729Sjoerg if (!Field->getType()->isPointerType() ||
4097330f729Sjoerg !Ctx.hasSameType(Field->getType()->getPointeeType(),
4107330f729Sjoerg ArrayType->getElementType())) {
4117330f729Sjoerg CGF.ErrorUnsupported(E, "weird std::initializer_list");
4127330f729Sjoerg return;
4137330f729Sjoerg }
4147330f729Sjoerg
4157330f729Sjoerg AggValueSlot Dest = EnsureSlot(E->getType());
4167330f729Sjoerg LValue DestLV = CGF.MakeAddrLValue(Dest.getAddress(), E->getType());
4177330f729Sjoerg LValue Start = CGF.EmitLValueForFieldInitialization(DestLV, *Field);
4187330f729Sjoerg llvm::Value *Zero = llvm::ConstantInt::get(CGF.PtrDiffTy, 0);
4197330f729Sjoerg llvm::Value *IdxStart[] = { Zero, Zero };
4207330f729Sjoerg llvm::Value *ArrayStart =
4217330f729Sjoerg Builder.CreateInBoundsGEP(ArrayPtr.getPointer(), IdxStart, "arraystart");
4227330f729Sjoerg CGF.EmitStoreThroughLValue(RValue::get(ArrayStart), Start);
4237330f729Sjoerg ++Field;
4247330f729Sjoerg
4257330f729Sjoerg if (Field == Record->field_end()) {
4267330f729Sjoerg CGF.ErrorUnsupported(E, "weird std::initializer_list");
4277330f729Sjoerg return;
4287330f729Sjoerg }
4297330f729Sjoerg
4307330f729Sjoerg llvm::Value *Size = Builder.getInt(ArrayType->getSize());
4317330f729Sjoerg LValue EndOrLength = CGF.EmitLValueForFieldInitialization(DestLV, *Field);
4327330f729Sjoerg if (Field->getType()->isPointerType() &&
4337330f729Sjoerg Ctx.hasSameType(Field->getType()->getPointeeType(),
4347330f729Sjoerg ArrayType->getElementType())) {
4357330f729Sjoerg // End pointer.
4367330f729Sjoerg llvm::Value *IdxEnd[] = { Zero, Size };
4377330f729Sjoerg llvm::Value *ArrayEnd =
4387330f729Sjoerg Builder.CreateInBoundsGEP(ArrayPtr.getPointer(), IdxEnd, "arrayend");
4397330f729Sjoerg CGF.EmitStoreThroughLValue(RValue::get(ArrayEnd), EndOrLength);
4407330f729Sjoerg } else if (Ctx.hasSameType(Field->getType(), Ctx.getSizeType())) {
4417330f729Sjoerg // Length.
4427330f729Sjoerg CGF.EmitStoreThroughLValue(RValue::get(Size), EndOrLength);
4437330f729Sjoerg } else {
4447330f729Sjoerg CGF.ErrorUnsupported(E, "weird std::initializer_list");
4457330f729Sjoerg return;
4467330f729Sjoerg }
4477330f729Sjoerg }
4487330f729Sjoerg
4497330f729Sjoerg /// Determine if E is a trivial array filler, that is, one that is
4507330f729Sjoerg /// equivalent to zero-initialization.
isTrivialFiller(Expr * E)4517330f729Sjoerg static bool isTrivialFiller(Expr *E) {
4527330f729Sjoerg if (!E)
4537330f729Sjoerg return true;
4547330f729Sjoerg
4557330f729Sjoerg if (isa<ImplicitValueInitExpr>(E))
4567330f729Sjoerg return true;
4577330f729Sjoerg
4587330f729Sjoerg if (auto *ILE = dyn_cast<InitListExpr>(E)) {
4597330f729Sjoerg if (ILE->getNumInits())
4607330f729Sjoerg return false;
4617330f729Sjoerg return isTrivialFiller(ILE->getArrayFiller());
4627330f729Sjoerg }
4637330f729Sjoerg
4647330f729Sjoerg if (auto *Cons = dyn_cast_or_null<CXXConstructExpr>(E))
4657330f729Sjoerg return Cons->getConstructor()->isDefaultConstructor() &&
4667330f729Sjoerg Cons->getConstructor()->isTrivial();
4677330f729Sjoerg
4687330f729Sjoerg // FIXME: Are there other cases where we can avoid emitting an initializer?
4697330f729Sjoerg return false;
4707330f729Sjoerg }
4717330f729Sjoerg
4727330f729Sjoerg /// Emit initialization of an array from an initializer list.
EmitArrayInit(Address DestPtr,llvm::ArrayType * AType,QualType ArrayQTy,InitListExpr * E)4737330f729Sjoerg void AggExprEmitter::EmitArrayInit(Address DestPtr, llvm::ArrayType *AType,
4747330f729Sjoerg QualType ArrayQTy, InitListExpr *E) {
4757330f729Sjoerg uint64_t NumInitElements = E->getNumInits();
4767330f729Sjoerg
4777330f729Sjoerg uint64_t NumArrayElements = AType->getNumElements();
4787330f729Sjoerg assert(NumInitElements <= NumArrayElements);
4797330f729Sjoerg
4807330f729Sjoerg QualType elementType =
4817330f729Sjoerg CGF.getContext().getAsArrayType(ArrayQTy)->getElementType();
4827330f729Sjoerg
4837330f729Sjoerg // DestPtr is an array*. Construct an elementType* by drilling
4847330f729Sjoerg // down a level.
4857330f729Sjoerg llvm::Value *zero = llvm::ConstantInt::get(CGF.SizeTy, 0);
4867330f729Sjoerg llvm::Value *indices[] = { zero, zero };
4877330f729Sjoerg llvm::Value *begin =
4887330f729Sjoerg Builder.CreateInBoundsGEP(DestPtr.getPointer(), indices, "arrayinit.begin");
4897330f729Sjoerg
4907330f729Sjoerg CharUnits elementSize = CGF.getContext().getTypeSizeInChars(elementType);
4917330f729Sjoerg CharUnits elementAlign =
4927330f729Sjoerg DestPtr.getAlignment().alignmentOfArrayElement(elementSize);
4937330f729Sjoerg
4947330f729Sjoerg // Consider initializing the array by copying from a global. For this to be
4957330f729Sjoerg // more efficient than per-element initialization, the size of the elements
4967330f729Sjoerg // with explicit initializers should be large enough.
4977330f729Sjoerg if (NumInitElements * elementSize.getQuantity() > 16 &&
4987330f729Sjoerg elementType.isTriviallyCopyableType(CGF.getContext())) {
4997330f729Sjoerg CodeGen::CodeGenModule &CGM = CGF.CGM;
500*e038c9c4Sjoerg ConstantEmitter Emitter(CGF);
5017330f729Sjoerg LangAS AS = ArrayQTy.getAddressSpace();
5027330f729Sjoerg if (llvm::Constant *C = Emitter.tryEmitForInitializer(E, AS, ArrayQTy)) {
5037330f729Sjoerg auto GV = new llvm::GlobalVariable(
5047330f729Sjoerg CGM.getModule(), C->getType(),
5057330f729Sjoerg CGM.isTypeConstant(ArrayQTy, /* ExcludeCtorDtor= */ true),
5067330f729Sjoerg llvm::GlobalValue::PrivateLinkage, C, "constinit",
5077330f729Sjoerg /* InsertBefore= */ nullptr, llvm::GlobalVariable::NotThreadLocal,
5087330f729Sjoerg CGM.getContext().getTargetAddressSpace(AS));
5097330f729Sjoerg Emitter.finalize(GV);
5107330f729Sjoerg CharUnits Align = CGM.getContext().getTypeAlignInChars(ArrayQTy);
5117330f729Sjoerg GV->setAlignment(Align.getAsAlign());
5127330f729Sjoerg EmitFinalDestCopy(ArrayQTy, CGF.MakeAddrLValue(GV, ArrayQTy, Align));
5137330f729Sjoerg return;
5147330f729Sjoerg }
5157330f729Sjoerg }
5167330f729Sjoerg
5177330f729Sjoerg // Exception safety requires us to destroy all the
5187330f729Sjoerg // already-constructed members if an initializer throws.
5197330f729Sjoerg // For that, we'll need an EH cleanup.
5207330f729Sjoerg QualType::DestructionKind dtorKind = elementType.isDestructedType();
5217330f729Sjoerg Address endOfInit = Address::invalid();
5227330f729Sjoerg EHScopeStack::stable_iterator cleanup;
5237330f729Sjoerg llvm::Instruction *cleanupDominator = nullptr;
5247330f729Sjoerg if (CGF.needsEHCleanup(dtorKind)) {
5257330f729Sjoerg // In principle we could tell the cleanup where we are more
5267330f729Sjoerg // directly, but the control flow can get so varied here that it
5277330f729Sjoerg // would actually be quite complex. Therefore we go through an
5287330f729Sjoerg // alloca.
5297330f729Sjoerg endOfInit = CGF.CreateTempAlloca(begin->getType(), CGF.getPointerAlign(),
5307330f729Sjoerg "arrayinit.endOfInit");
5317330f729Sjoerg cleanupDominator = Builder.CreateStore(begin, endOfInit);
5327330f729Sjoerg CGF.pushIrregularPartialArrayCleanup(begin, endOfInit, elementType,
5337330f729Sjoerg elementAlign,
5347330f729Sjoerg CGF.getDestroyer(dtorKind));
5357330f729Sjoerg cleanup = CGF.EHStack.stable_begin();
5367330f729Sjoerg
5377330f729Sjoerg // Otherwise, remember that we didn't need a cleanup.
5387330f729Sjoerg } else {
5397330f729Sjoerg dtorKind = QualType::DK_none;
5407330f729Sjoerg }
5417330f729Sjoerg
5427330f729Sjoerg llvm::Value *one = llvm::ConstantInt::get(CGF.SizeTy, 1);
5437330f729Sjoerg
5447330f729Sjoerg // The 'current element to initialize'. The invariants on this
5457330f729Sjoerg // variable are complicated. Essentially, after each iteration of
5467330f729Sjoerg // the loop, it points to the last initialized element, except
5477330f729Sjoerg // that it points to the beginning of the array before any
5487330f729Sjoerg // elements have been initialized.
5497330f729Sjoerg llvm::Value *element = begin;
5507330f729Sjoerg
5517330f729Sjoerg // Emit the explicit initializers.
5527330f729Sjoerg for (uint64_t i = 0; i != NumInitElements; ++i) {
5537330f729Sjoerg // Advance to the next element.
5547330f729Sjoerg if (i > 0) {
5557330f729Sjoerg element = Builder.CreateInBoundsGEP(element, one, "arrayinit.element");
5567330f729Sjoerg
5577330f729Sjoerg // Tell the cleanup that it needs to destroy up to this
5587330f729Sjoerg // element. TODO: some of these stores can be trivially
5597330f729Sjoerg // observed to be unnecessary.
5607330f729Sjoerg if (endOfInit.isValid()) Builder.CreateStore(element, endOfInit);
5617330f729Sjoerg }
5627330f729Sjoerg
5637330f729Sjoerg LValue elementLV =
5647330f729Sjoerg CGF.MakeAddrLValue(Address(element, elementAlign), elementType);
5657330f729Sjoerg EmitInitializationToLValue(E->getInit(i), elementLV);
5667330f729Sjoerg }
5677330f729Sjoerg
5687330f729Sjoerg // Check whether there's a non-trivial array-fill expression.
5697330f729Sjoerg Expr *filler = E->getArrayFiller();
5707330f729Sjoerg bool hasTrivialFiller = isTrivialFiller(filler);
5717330f729Sjoerg
5727330f729Sjoerg // Any remaining elements need to be zero-initialized, possibly
5737330f729Sjoerg // using the filler expression. We can skip this if the we're
5747330f729Sjoerg // emitting to zeroed memory.
5757330f729Sjoerg if (NumInitElements != NumArrayElements &&
5767330f729Sjoerg !(Dest.isZeroed() && hasTrivialFiller &&
5777330f729Sjoerg CGF.getTypes().isZeroInitializable(elementType))) {
5787330f729Sjoerg
5797330f729Sjoerg // Use an actual loop. This is basically
5807330f729Sjoerg // do { *array++ = filler; } while (array != end);
5817330f729Sjoerg
5827330f729Sjoerg // Advance to the start of the rest of the array.
5837330f729Sjoerg if (NumInitElements) {
5847330f729Sjoerg element = Builder.CreateInBoundsGEP(element, one, "arrayinit.start");
5857330f729Sjoerg if (endOfInit.isValid()) Builder.CreateStore(element, endOfInit);
5867330f729Sjoerg }
5877330f729Sjoerg
5887330f729Sjoerg // Compute the end of the array.
5897330f729Sjoerg llvm::Value *end = Builder.CreateInBoundsGEP(begin,
5907330f729Sjoerg llvm::ConstantInt::get(CGF.SizeTy, NumArrayElements),
5917330f729Sjoerg "arrayinit.end");
5927330f729Sjoerg
5937330f729Sjoerg llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
5947330f729Sjoerg llvm::BasicBlock *bodyBB = CGF.createBasicBlock("arrayinit.body");
5957330f729Sjoerg
5967330f729Sjoerg // Jump into the body.
5977330f729Sjoerg CGF.EmitBlock(bodyBB);
5987330f729Sjoerg llvm::PHINode *currentElement =
5997330f729Sjoerg Builder.CreatePHI(element->getType(), 2, "arrayinit.cur");
6007330f729Sjoerg currentElement->addIncoming(element, entryBB);
6017330f729Sjoerg
6027330f729Sjoerg // Emit the actual filler expression.
6037330f729Sjoerg {
6047330f729Sjoerg // C++1z [class.temporary]p5:
6057330f729Sjoerg // when a default constructor is called to initialize an element of
6067330f729Sjoerg // an array with no corresponding initializer [...] the destruction of
6077330f729Sjoerg // every temporary created in a default argument is sequenced before
6087330f729Sjoerg // the construction of the next array element, if any
6097330f729Sjoerg CodeGenFunction::RunCleanupsScope CleanupsScope(CGF);
6107330f729Sjoerg LValue elementLV =
6117330f729Sjoerg CGF.MakeAddrLValue(Address(currentElement, elementAlign), elementType);
6127330f729Sjoerg if (filler)
6137330f729Sjoerg EmitInitializationToLValue(filler, elementLV);
6147330f729Sjoerg else
6157330f729Sjoerg EmitNullInitializationToLValue(elementLV);
6167330f729Sjoerg }
6177330f729Sjoerg
6187330f729Sjoerg // Move on to the next element.
6197330f729Sjoerg llvm::Value *nextElement =
6207330f729Sjoerg Builder.CreateInBoundsGEP(currentElement, one, "arrayinit.next");
6217330f729Sjoerg
6227330f729Sjoerg // Tell the EH cleanup that we finished with the last element.
6237330f729Sjoerg if (endOfInit.isValid()) Builder.CreateStore(nextElement, endOfInit);
6247330f729Sjoerg
6257330f729Sjoerg // Leave the loop if we're done.
6267330f729Sjoerg llvm::Value *done = Builder.CreateICmpEQ(nextElement, end,
6277330f729Sjoerg "arrayinit.done");
6287330f729Sjoerg llvm::BasicBlock *endBB = CGF.createBasicBlock("arrayinit.end");
6297330f729Sjoerg Builder.CreateCondBr(done, endBB, bodyBB);
6307330f729Sjoerg currentElement->addIncoming(nextElement, Builder.GetInsertBlock());
6317330f729Sjoerg
6327330f729Sjoerg CGF.EmitBlock(endBB);
6337330f729Sjoerg }
6347330f729Sjoerg
6357330f729Sjoerg // Leave the partial-array cleanup if we entered one.
6367330f729Sjoerg if (dtorKind) CGF.DeactivateCleanupBlock(cleanup, cleanupDominator);
6377330f729Sjoerg }
6387330f729Sjoerg
6397330f729Sjoerg //===----------------------------------------------------------------------===//
6407330f729Sjoerg // Visitor Methods
6417330f729Sjoerg //===----------------------------------------------------------------------===//
6427330f729Sjoerg
VisitMaterializeTemporaryExpr(MaterializeTemporaryExpr * E)6437330f729Sjoerg void AggExprEmitter::VisitMaterializeTemporaryExpr(MaterializeTemporaryExpr *E){
644*e038c9c4Sjoerg Visit(E->getSubExpr());
6457330f729Sjoerg }
6467330f729Sjoerg
VisitOpaqueValueExpr(OpaqueValueExpr * e)6477330f729Sjoerg void AggExprEmitter::VisitOpaqueValueExpr(OpaqueValueExpr *e) {
6487330f729Sjoerg // If this is a unique OVE, just visit its source expression.
6497330f729Sjoerg if (e->isUnique())
6507330f729Sjoerg Visit(e->getSourceExpr());
6517330f729Sjoerg else
6527330f729Sjoerg EmitFinalDestCopy(e->getType(), CGF.getOrCreateOpaqueLValueMapping(e));
6537330f729Sjoerg }
6547330f729Sjoerg
6557330f729Sjoerg void
VisitCompoundLiteralExpr(CompoundLiteralExpr * E)6567330f729Sjoerg AggExprEmitter::VisitCompoundLiteralExpr(CompoundLiteralExpr *E) {
6577330f729Sjoerg if (Dest.isPotentiallyAliased() &&
6587330f729Sjoerg E->getType().isPODType(CGF.getContext())) {
6597330f729Sjoerg // For a POD type, just emit a load of the lvalue + a copy, because our
6607330f729Sjoerg // compound literal might alias the destination.
6617330f729Sjoerg EmitAggLoadOfLValue(E);
6627330f729Sjoerg return;
6637330f729Sjoerg }
6647330f729Sjoerg
6657330f729Sjoerg AggValueSlot Slot = EnsureSlot(E->getType());
666*e038c9c4Sjoerg
667*e038c9c4Sjoerg // Block-scope compound literals are destroyed at the end of the enclosing
668*e038c9c4Sjoerg // scope in C.
669*e038c9c4Sjoerg bool Destruct =
670*e038c9c4Sjoerg !CGF.getLangOpts().CPlusPlus && !Slot.isExternallyDestructed();
671*e038c9c4Sjoerg if (Destruct)
672*e038c9c4Sjoerg Slot.setExternallyDestructed();
673*e038c9c4Sjoerg
6747330f729Sjoerg CGF.EmitAggExpr(E->getInitializer(), Slot);
675*e038c9c4Sjoerg
676*e038c9c4Sjoerg if (Destruct)
677*e038c9c4Sjoerg if (QualType::DestructionKind DtorKind = E->getType().isDestructedType())
678*e038c9c4Sjoerg CGF.pushLifetimeExtendedDestroy(
679*e038c9c4Sjoerg CGF.getCleanupKind(DtorKind), Slot.getAddress(), E->getType(),
680*e038c9c4Sjoerg CGF.getDestroyer(DtorKind), DtorKind & EHCleanup);
6817330f729Sjoerg }
6827330f729Sjoerg
6837330f729Sjoerg /// Attempt to look through various unimportant expressions to find a
6847330f729Sjoerg /// cast of the given kind.
findPeephole(Expr * op,CastKind kind,const ASTContext & ctx)685*e038c9c4Sjoerg static Expr *findPeephole(Expr *op, CastKind kind, const ASTContext &ctx) {
686*e038c9c4Sjoerg op = op->IgnoreParenNoopCasts(ctx);
687*e038c9c4Sjoerg if (auto castE = dyn_cast<CastExpr>(op)) {
6887330f729Sjoerg if (castE->getCastKind() == kind)
6897330f729Sjoerg return castE->getSubExpr();
6907330f729Sjoerg }
6917330f729Sjoerg return nullptr;
6927330f729Sjoerg }
6937330f729Sjoerg
/// Emit a cast expression of aggregate type into the destination slot.
/// Most cast kinds are invalid for aggregates; the interesting cases are
/// unions, bit-casts, atomic conversions, and volatile loads.
void AggExprEmitter::VisitCastExpr(CastExpr *E) {
  if (const auto *ECE = dyn_cast<ExplicitCastExpr>(E))
    CGF.CGM.EmitExplicitCastExprType(ECE, &CGF);
  switch (E->getCastKind()) {
  case CK_Dynamic: {
    // FIXME: Can this actually happen? We have no test coverage for it.
    assert(isa<CXXDynamicCastExpr>(E) && "CK_Dynamic without a dynamic_cast?");
    LValue LV = CGF.EmitCheckedLValue(E->getSubExpr(),
                                      CodeGenFunction::TCK_Load);
    // FIXME: Do we also need to handle property references here?
    if (LV.isSimple())
      CGF.EmitDynamicCast(LV.getAddress(CGF), cast<CXXDynamicCastExpr>(E));
    else
      CGF.CGM.ErrorUnsupported(E, "non-simple lvalue dynamic_cast");

    if (!Dest.isIgnored())
      CGF.CGM.ErrorUnsupported(E, "lvalue dynamic_cast with a destination");
    break;
  }

  case CK_ToUnion: {
    // Evaluate even if the destination is ignored.
    if (Dest.isIgnored()) {
      CGF.EmitAnyExpr(E->getSubExpr(), AggValueSlot::ignored(),
                      /*ignoreResult=*/true);
      break;
    }

    // GCC union extension: reinterpret the destination as the operand's
    // type and initialize that view directly.
    QualType Ty = E->getSubExpr()->getType();
    Address CastPtr =
      Builder.CreateElementBitCast(Dest.getAddress(), CGF.ConvertType(Ty));
    EmitInitializationToLValue(E->getSubExpr(),
                               CGF.MakeAddrLValue(CastPtr, Ty));
    break;
  }

  case CK_LValueToRValueBitCast: {
    if (Dest.isIgnored()) {
      CGF.EmitAnyExpr(E->getSubExpr(), AggValueSlot::ignored(),
                      /*ignoreResult=*/true);
      break;
    }

    // Copy the object representation byte-for-byte (std::bit_cast
    // semantics): memcpy typeSizeInChars bytes from source to dest.
    LValue SourceLV = CGF.EmitLValue(E->getSubExpr());
    Address SourceAddress =
        Builder.CreateElementBitCast(SourceLV.getAddress(CGF), CGF.Int8Ty);
    Address DestAddress =
        Builder.CreateElementBitCast(Dest.getAddress(), CGF.Int8Ty);
    llvm::Value *SizeVal = llvm::ConstantInt::get(
        CGF.SizeTy,
        CGF.getContext().getTypeSizeInChars(E->getType()).getQuantity());
    Builder.CreateMemCpy(DestAddress, SourceAddress, SizeVal);
    break;
  }

  case CK_DerivedToBase:
  case CK_BaseToDerived:
  case CK_UncheckedDerivedToBase: {
    llvm_unreachable("cannot perform hierarchy conversion in EmitAggExpr: "
                     "should have been unpacked before we got here");
  }

  case CK_NonAtomicToAtomic:
  case CK_AtomicToNonAtomic: {
    bool isToAtomic = (E->getCastKind() == CK_NonAtomicToAtomic);

    // Determine the atomic and value types.
    QualType atomicType = E->getSubExpr()->getType();
    QualType valueType = E->getType();
    if (isToAtomic) std::swap(atomicType, valueType);

    assert(atomicType->isAtomicType());
    assert(CGF.getContext().hasSameUnqualifiedType(valueType,
                          atomicType->castAs<AtomicType>()->getValueType()));

    // Just recurse normally if we're ignoring the result or the
    // atomic type doesn't change representation.
    if (Dest.isIgnored() || !CGF.CGM.isPaddedAtomicType(atomicType)) {
      return Visit(E->getSubExpr());
    }

    CastKind peepholeTarget =
      (isToAtomic ? CK_AtomicToNonAtomic : CK_NonAtomicToAtomic);

    // These two cases are reverses of each other; try to peephole them.
    if (Expr *op =
            findPeephole(E->getSubExpr(), peepholeTarget, CGF.getContext())) {
      assert(CGF.getContext().hasSameUnqualifiedType(op->getType(),
                                                     E->getType()) &&
           "peephole significantly changed types?");
      return Visit(op);
    }

    // If we're converting an r-value of non-atomic type to an r-value
    // of atomic type, just emit directly into the relevant sub-object.
    if (isToAtomic) {
      AggValueSlot valueDest = Dest;
      if (!valueDest.isIgnored() && CGF.CGM.isPaddedAtomicType(atomicType)) {
        // Zero-initialize. (Strictly speaking, we only need to initialize
        // the padding at the end, but this is simpler.)
        if (!Dest.isZeroed())
          CGF.EmitNullInitialization(Dest.getAddress(), atomicType);

        // Build a GEP to refer to the subobject.
        Address valueAddr =
            CGF.Builder.CreateStructGEP(valueDest.getAddress(), 0);
        valueDest = AggValueSlot::forAddr(valueAddr,
                                          valueDest.getQualifiers(),
                                          valueDest.isExternallyDestructed(),
                                          valueDest.requiresGCollection(),
                                          valueDest.isPotentiallyAliased(),
                                          AggValueSlot::DoesNotOverlap,
                                          AggValueSlot::IsZeroed);
      }

      CGF.EmitAggExpr(E->getSubExpr(), valueDest);
      return;
    }

    // Otherwise, we're converting an atomic type to a non-atomic type.
    // Make an atomic temporary, emit into that, and then copy the value out.
    AggValueSlot atomicSlot =
      CGF.CreateAggTemp(atomicType, "atomic-to-nonatomic.temp");
    CGF.EmitAggExpr(E->getSubExpr(), atomicSlot);

    Address valueAddr = Builder.CreateStructGEP(atomicSlot.getAddress(), 0);
    RValue rvalue = RValue::getAggregate(valueAddr, atomicSlot.isVolatile());
    return EmitFinalDestCopy(valueType, rvalue);
  }
  case CK_AddressSpaceConversion:
     return Visit(E->getSubExpr());

  case CK_LValueToRValue:
    // If we're loading from a volatile type, force the destination
    // into existence.
    if (E->getSubExpr()->getType().isVolatileQualified()) {
      // A non-trivial C struct loaded here needs its destructor pushed
      // unless someone else is already responsible for destroying it.
      bool Destruct =
          !Dest.isExternallyDestructed() &&
          E->getType().isDestructedType() == QualType::DK_nontrivial_c_struct;
      if (Destruct)
        Dest.setExternallyDestructed();
      EnsureDest(E->getType());
      Visit(E->getSubExpr());

      if (Destruct)
        CGF.pushDestroy(QualType::DK_nontrivial_c_struct, Dest.getAddress(),
                        E->getType());

      return;
    }

    LLVM_FALLTHROUGH;


  case CK_NoOp:
  case CK_UserDefinedConversion:
  case CK_ConstructorConversion:
    assert(CGF.getContext().hasSameUnqualifiedType(E->getSubExpr()->getType(),
                                                   E->getType()) &&
           "Implicit cast types must be compatible");
    Visit(E->getSubExpr());
    break;

  case CK_LValueBitCast:
    llvm_unreachable("should not be emitting lvalue bitcast as rvalue");

  // None of the remaining cast kinds can produce an aggregate value.
  case CK_Dependent:
  case CK_BitCast:
  case CK_ArrayToPointerDecay:
  case CK_FunctionToPointerDecay:
  case CK_NullToPointer:
  case CK_NullToMemberPointer:
  case CK_BaseToDerivedMemberPointer:
  case CK_DerivedToBaseMemberPointer:
  case CK_MemberPointerToBoolean:
  case CK_ReinterpretMemberPointer:
  case CK_IntegralToPointer:
  case CK_PointerToIntegral:
  case CK_PointerToBoolean:
  case CK_ToVoid:
  case CK_VectorSplat:
  case CK_IntegralCast:
  case CK_BooleanToSignedIntegral:
  case CK_IntegralToBoolean:
  case CK_IntegralToFloating:
  case CK_FloatingToIntegral:
  case CK_FloatingToBoolean:
  case CK_FloatingCast:
  case CK_CPointerToObjCPointerCast:
  case CK_BlockPointerToObjCPointerCast:
  case CK_AnyPointerToBlockPointerCast:
  case CK_ObjCObjectLValueCast:
  case CK_FloatingRealToComplex:
  case CK_FloatingComplexToReal:
  case CK_FloatingComplexToBoolean:
  case CK_FloatingComplexCast:
  case CK_FloatingComplexToIntegralComplex:
  case CK_IntegralRealToComplex:
  case CK_IntegralComplexToReal:
  case CK_IntegralComplexToBoolean:
  case CK_IntegralComplexCast:
  case CK_IntegralComplexToFloatingComplex:
  case CK_ARCProduceObject:
  case CK_ARCConsumeObject:
  case CK_ARCReclaimReturnedObject:
  case CK_ARCExtendBlockObject:
  case CK_CopyAndAutoreleaseBlockObject:
  case CK_BuiltinFnToFnPtr:
  case CK_ZeroToOCLOpaqueType:
  case CK_MatrixCast:

  case CK_IntToOCLSampler:
  case CK_FloatingToFixedPoint:
  case CK_FixedPointToFloating:
  case CK_FixedPointCast:
  case CK_FixedPointToBoolean:
  case CK_FixedPointToIntegral:
  case CK_IntegralToFixedPoint:
    llvm_unreachable("cast kind invalid for aggregate types");
  }
}
9167330f729Sjoerg
VisitCallExpr(const CallExpr * E)9177330f729Sjoerg void AggExprEmitter::VisitCallExpr(const CallExpr *E) {
9187330f729Sjoerg if (E->getCallReturnType(CGF.getContext())->isReferenceType()) {
9197330f729Sjoerg EmitAggLoadOfLValue(E);
9207330f729Sjoerg return;
9217330f729Sjoerg }
9227330f729Sjoerg
9237330f729Sjoerg withReturnValueSlot(E, [&](ReturnValueSlot Slot) {
9247330f729Sjoerg return CGF.EmitCallExpr(E, Slot);
9257330f729Sjoerg });
9267330f729Sjoerg }
9277330f729Sjoerg
VisitObjCMessageExpr(ObjCMessageExpr * E)9287330f729Sjoerg void AggExprEmitter::VisitObjCMessageExpr(ObjCMessageExpr *E) {
9297330f729Sjoerg withReturnValueSlot(E, [&](ReturnValueSlot Slot) {
9307330f729Sjoerg return CGF.EmitObjCMessageExpr(E, Slot);
9317330f729Sjoerg });
9327330f729Sjoerg }
9337330f729Sjoerg
VisitBinComma(const BinaryOperator * E)9347330f729Sjoerg void AggExprEmitter::VisitBinComma(const BinaryOperator *E) {
9357330f729Sjoerg CGF.EmitIgnoredExpr(E->getLHS());
9367330f729Sjoerg Visit(E->getRHS());
9377330f729Sjoerg }
9387330f729Sjoerg
VisitStmtExpr(const StmtExpr * E)9397330f729Sjoerg void AggExprEmitter::VisitStmtExpr(const StmtExpr *E) {
9407330f729Sjoerg CodeGenFunction::StmtExprEvaluation eval(CGF);
9417330f729Sjoerg CGF.EmitCompoundStmt(*E->getSubStmt(), true, Dest);
9427330f729Sjoerg }
9437330f729Sjoerg
// Primitive comparison outcomes used when lowering a three-way
// comparison (<=>) into a chain of scalar compares and selects.
enum CompareKind {
  CK_Less,
  CK_Greater,
  CK_Equal,
};
9497330f729Sjoerg
EmitCompare(CGBuilderTy & Builder,CodeGenFunction & CGF,const BinaryOperator * E,llvm::Value * LHS,llvm::Value * RHS,CompareKind Kind,const char * NameSuffix="")9507330f729Sjoerg static llvm::Value *EmitCompare(CGBuilderTy &Builder, CodeGenFunction &CGF,
9517330f729Sjoerg const BinaryOperator *E, llvm::Value *LHS,
9527330f729Sjoerg llvm::Value *RHS, CompareKind Kind,
9537330f729Sjoerg const char *NameSuffix = "") {
9547330f729Sjoerg QualType ArgTy = E->getLHS()->getType();
9557330f729Sjoerg if (const ComplexType *CT = ArgTy->getAs<ComplexType>())
9567330f729Sjoerg ArgTy = CT->getElementType();
9577330f729Sjoerg
9587330f729Sjoerg if (const auto *MPT = ArgTy->getAs<MemberPointerType>()) {
9597330f729Sjoerg assert(Kind == CK_Equal &&
9607330f729Sjoerg "member pointers may only be compared for equality");
9617330f729Sjoerg return CGF.CGM.getCXXABI().EmitMemberPointerComparison(
9627330f729Sjoerg CGF, LHS, RHS, MPT, /*IsInequality*/ false);
9637330f729Sjoerg }
9647330f729Sjoerg
9657330f729Sjoerg // Compute the comparison instructions for the specified comparison kind.
9667330f729Sjoerg struct CmpInstInfo {
9677330f729Sjoerg const char *Name;
9687330f729Sjoerg llvm::CmpInst::Predicate FCmp;
9697330f729Sjoerg llvm::CmpInst::Predicate SCmp;
9707330f729Sjoerg llvm::CmpInst::Predicate UCmp;
9717330f729Sjoerg };
9727330f729Sjoerg CmpInstInfo InstInfo = [&]() -> CmpInstInfo {
9737330f729Sjoerg using FI = llvm::FCmpInst;
9747330f729Sjoerg using II = llvm::ICmpInst;
9757330f729Sjoerg switch (Kind) {
9767330f729Sjoerg case CK_Less:
9777330f729Sjoerg return {"cmp.lt", FI::FCMP_OLT, II::ICMP_SLT, II::ICMP_ULT};
9787330f729Sjoerg case CK_Greater:
9797330f729Sjoerg return {"cmp.gt", FI::FCMP_OGT, II::ICMP_SGT, II::ICMP_UGT};
9807330f729Sjoerg case CK_Equal:
9817330f729Sjoerg return {"cmp.eq", FI::FCMP_OEQ, II::ICMP_EQ, II::ICMP_EQ};
9827330f729Sjoerg }
9837330f729Sjoerg llvm_unreachable("Unrecognised CompareKind enum");
9847330f729Sjoerg }();
9857330f729Sjoerg
9867330f729Sjoerg if (ArgTy->hasFloatingRepresentation())
9877330f729Sjoerg return Builder.CreateFCmp(InstInfo.FCmp, LHS, RHS,
9887330f729Sjoerg llvm::Twine(InstInfo.Name) + NameSuffix);
9897330f729Sjoerg if (ArgTy->isIntegralOrEnumerationType() || ArgTy->isPointerType()) {
9907330f729Sjoerg auto Inst =
9917330f729Sjoerg ArgTy->hasSignedIntegerRepresentation() ? InstInfo.SCmp : InstInfo.UCmp;
9927330f729Sjoerg return Builder.CreateICmp(Inst, LHS, RHS,
9937330f729Sjoerg llvm::Twine(InstInfo.Name) + NameSuffix);
9947330f729Sjoerg }
9957330f729Sjoerg
9967330f729Sjoerg llvm_unreachable("unsupported aggregate binary expression should have "
9977330f729Sjoerg "already been handled");
9987330f729Sjoerg }
9997330f729Sjoerg
/// Lower a three-way comparison (<=>) of scalar operands into the
/// comparison-category class object in the destination slot.
void AggExprEmitter::VisitBinCmp(const BinaryOperator *E) {
  using llvm::BasicBlock;
  using llvm::PHINode;
  using llvm::Value;
  assert(CGF.getContext().hasSameType(E->getLHS()->getType(),
                                      E->getRHS()->getType()));
  const ComparisonCategoryInfo &CmpInfo =
      CGF.getContext().CompCategories.getInfoForType(E->getType());
  assert(CmpInfo.Record->isTriviallyCopyable() &&
         "cannot copy non-trivially copyable aggregate");

  QualType ArgTy = E->getLHS()->getType();

  // Only scalar-ish operand types are supported here; record types get
  // rewritten before reaching codegen.
  if (!ArgTy->isIntegralOrEnumerationType() && !ArgTy->isRealFloatingType() &&
      !ArgTy->isNullPtrType() && !ArgTy->isPointerType() &&
      !ArgTy->isMemberPointerType() && !ArgTy->isAnyComplexType()) {
    return CGF.ErrorUnsupported(E, "aggregate three-way comparison");
  }
  bool IsComplex = ArgTy->isAnyComplexType();

  // Evaluate the operands to the expression and extract their values.
  // The second element of the pair is only used for the imaginary part
  // of a complex operand.
  auto EmitOperand = [&](Expr *E) -> std::pair<Value *, Value *> {
    RValue RV = CGF.EmitAnyExpr(E);
    if (RV.isScalar())
      return {RV.getScalarVal(), nullptr};
    if (RV.isAggregate())
      return {RV.getAggregatePointer(), nullptr};
    assert(RV.isComplex());
    return RV.getComplexVal();
  };
  auto LHSValues = EmitOperand(E->getLHS()),
       RHSValues = EmitOperand(E->getRHS());

  // Compare the operands; for complex values, equality requires both the
  // real and imaginary components to compare equal.
  auto EmitCmp = [&](CompareKind K) {
    Value *Cmp = EmitCompare(Builder, CGF, E, LHSValues.first, RHSValues.first,
                             K, IsComplex ? ".r" : "");
    if (!IsComplex)
      return Cmp;
    assert(K == CompareKind::CK_Equal);
    Value *CmpImag = EmitCompare(Builder, CGF, E, LHSValues.second,
                                 RHSValues.second, K, ".i");
    return Builder.CreateAnd(Cmp, CmpImag, "and.eq");
  };
  // Materialize the integer constant that represents a given comparison
  // outcome (less/equal/greater/unordered) for this category type.
  auto EmitCmpRes = [&](const ComparisonCategoryInfo::ValueInfo *VInfo) {
    return Builder.getInt(VInfo->getIntValue());
  };

  Value *Select;
  if (ArgTy->isNullPtrType()) {
    // nullptr_t values always compare equal.
    Select = EmitCmpRes(CmpInfo.getEqualOrEquiv());
  } else if (!CmpInfo.isPartial()) {
    // Total ordering: eq ? equal : (lt ? less : greater).
    Value *SelectOne =
        Builder.CreateSelect(EmitCmp(CK_Less), EmitCmpRes(CmpInfo.getLess()),
                             EmitCmpRes(CmpInfo.getGreater()), "sel.lt");
    Select = Builder.CreateSelect(EmitCmp(CK_Equal),
                                  EmitCmpRes(CmpInfo.getEqualOrEquiv()),
                                  SelectOne, "sel.eq");
  } else {
    // Partial ordering: lt ? less : (gt ? greater : (eq ? equal : unordered)).
    Value *SelectEq = Builder.CreateSelect(
        EmitCmp(CK_Equal), EmitCmpRes(CmpInfo.getEqualOrEquiv()),
        EmitCmpRes(CmpInfo.getUnordered()), "sel.eq");
    Value *SelectGT = Builder.CreateSelect(EmitCmp(CK_Greater),
                                           EmitCmpRes(CmpInfo.getGreater()),
                                           SelectEq, "sel.gt");
    Select = Builder.CreateSelect(
        EmitCmp(CK_Less), EmitCmpRes(CmpInfo.getLess()), SelectGT, "sel.lt");
  }
  // Create the return value in the destination slot.
  EnsureDest(E->getType());
  LValue DestLV = CGF.MakeAddrLValue(Dest.getAddress(), E->getType());

  // Emit the address of the first (and only) field in the comparison category
  // type, and initialize it from the constant integer value selected above.
  LValue FieldLV = CGF.EmitLValueForFieldInitialization(
      DestLV, *CmpInfo.Record->field_begin());
  CGF.EmitStoreThroughLValue(RValue::get(Select), FieldLV, /*IsInit*/ true);

  // All done! The result is in the Dest slot.
}
10797330f729Sjoerg
VisitBinaryOperator(const BinaryOperator * E)10807330f729Sjoerg void AggExprEmitter::VisitBinaryOperator(const BinaryOperator *E) {
10817330f729Sjoerg if (E->getOpcode() == BO_PtrMemD || E->getOpcode() == BO_PtrMemI)
10827330f729Sjoerg VisitPointerToDataMemberBinaryOperator(E);
10837330f729Sjoerg else
10847330f729Sjoerg CGF.ErrorUnsupported(E, "aggregate binary expression");
10857330f729Sjoerg }
10867330f729Sjoerg
VisitPointerToDataMemberBinaryOperator(const BinaryOperator * E)10877330f729Sjoerg void AggExprEmitter::VisitPointerToDataMemberBinaryOperator(
10887330f729Sjoerg const BinaryOperator *E) {
10897330f729Sjoerg LValue LV = CGF.EmitPointerToDataMemberBinaryExpr(E);
10907330f729Sjoerg EmitFinalDestCopy(E->getType(), LV);
10917330f729Sjoerg }
10927330f729Sjoerg
10937330f729Sjoerg /// Is the value of the given expression possibly a reference to or
10947330f729Sjoerg /// into a __block variable?
isBlockVarRef(const Expr * E)10957330f729Sjoerg static bool isBlockVarRef(const Expr *E) {
10967330f729Sjoerg // Make sure we look through parens.
10977330f729Sjoerg E = E->IgnoreParens();
10987330f729Sjoerg
10997330f729Sjoerg // Check for a direct reference to a __block variable.
11007330f729Sjoerg if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(E)) {
11017330f729Sjoerg const VarDecl *var = dyn_cast<VarDecl>(DRE->getDecl());
11027330f729Sjoerg return (var && var->hasAttr<BlocksAttr>());
11037330f729Sjoerg }
11047330f729Sjoerg
11057330f729Sjoerg // More complicated stuff.
11067330f729Sjoerg
11077330f729Sjoerg // Binary operators.
11087330f729Sjoerg if (const BinaryOperator *op = dyn_cast<BinaryOperator>(E)) {
11097330f729Sjoerg // For an assignment or pointer-to-member operation, just care
11107330f729Sjoerg // about the LHS.
11117330f729Sjoerg if (op->isAssignmentOp() || op->isPtrMemOp())
11127330f729Sjoerg return isBlockVarRef(op->getLHS());
11137330f729Sjoerg
11147330f729Sjoerg // For a comma, just care about the RHS.
11157330f729Sjoerg if (op->getOpcode() == BO_Comma)
11167330f729Sjoerg return isBlockVarRef(op->getRHS());
11177330f729Sjoerg
11187330f729Sjoerg // FIXME: pointer arithmetic?
11197330f729Sjoerg return false;
11207330f729Sjoerg
11217330f729Sjoerg // Check both sides of a conditional operator.
11227330f729Sjoerg } else if (const AbstractConditionalOperator *op
11237330f729Sjoerg = dyn_cast<AbstractConditionalOperator>(E)) {
11247330f729Sjoerg return isBlockVarRef(op->getTrueExpr())
11257330f729Sjoerg || isBlockVarRef(op->getFalseExpr());
11267330f729Sjoerg
11277330f729Sjoerg // OVEs are required to support BinaryConditionalOperators.
11287330f729Sjoerg } else if (const OpaqueValueExpr *op
11297330f729Sjoerg = dyn_cast<OpaqueValueExpr>(E)) {
11307330f729Sjoerg if (const Expr *src = op->getSourceExpr())
11317330f729Sjoerg return isBlockVarRef(src);
11327330f729Sjoerg
11337330f729Sjoerg // Casts are necessary to get things like (*(int*)&var) = foo().
11347330f729Sjoerg // We don't really care about the kind of cast here, except
11357330f729Sjoerg // we don't want to look through l2r casts, because it's okay
11367330f729Sjoerg // to get the *value* in a __block variable.
11377330f729Sjoerg } else if (const CastExpr *cast = dyn_cast<CastExpr>(E)) {
11387330f729Sjoerg if (cast->getCastKind() == CK_LValueToRValue)
11397330f729Sjoerg return false;
11407330f729Sjoerg return isBlockVarRef(cast->getSubExpr());
11417330f729Sjoerg
11427330f729Sjoerg // Handle unary operators. Again, just aggressively look through
11437330f729Sjoerg // it, ignoring the operation.
11447330f729Sjoerg } else if (const UnaryOperator *uop = dyn_cast<UnaryOperator>(E)) {
11457330f729Sjoerg return isBlockVarRef(uop->getSubExpr());
11467330f729Sjoerg
11477330f729Sjoerg // Look into the base of a field access.
11487330f729Sjoerg } else if (const MemberExpr *mem = dyn_cast<MemberExpr>(E)) {
11497330f729Sjoerg return isBlockVarRef(mem->getBase());
11507330f729Sjoerg
11517330f729Sjoerg // Look into the base of a subscript.
11527330f729Sjoerg } else if (const ArraySubscriptExpr *sub = dyn_cast<ArraySubscriptExpr>(E)) {
11537330f729Sjoerg return isBlockVarRef(sub->getBase());
11547330f729Sjoerg }
11557330f729Sjoerg
11567330f729Sjoerg return false;
11577330f729Sjoerg }
11587330f729Sjoerg
/// Emit an aggregate assignment, evaluating the RHS into the LHS storage
/// and then copying to the destination slot if the result is used.
void AggExprEmitter::VisitBinAssign(const BinaryOperator *E) {
  // For an assignment to work, the value on the right has
  // to be compatible with the value on the left.
  assert(CGF.getContext().hasSameUnqualifiedType(E->getLHS()->getType(),
                                                 E->getRHS()->getType())
         && "Invalid assignment");

  // If the LHS might be a __block variable, and the RHS can
  // potentially cause a block copy, we need to evaluate the RHS first
  // so that the assignment goes the right place.
  // This is pretty semantically fragile.
  if (isBlockVarRef(E->getLHS()) &&
      E->getRHS()->HasSideEffects(CGF.getContext())) {
    // Ensure that we have a destination, and evaluate the RHS into that.
    EnsureDest(E->getRHS()->getType());
    Visit(E->getRHS());

    // Now emit the LHS and copy into it.
    LValue LHS = CGF.EmitCheckedLValue(E->getLHS(), CodeGenFunction::TCK_Store);

    // That copy is an atomic copy if the LHS is atomic.
    if (LHS.getType()->isAtomicType() ||
        CGF.LValueIsSuitableForInlineAtomic(LHS)) {
      CGF.EmitAtomicStore(Dest.asRValue(), LHS, /*isInit*/ false);
      return;
    }

    EmitCopy(E->getLHS()->getType(),
             AggValueSlot::forLValue(LHS, CGF, AggValueSlot::IsDestructed,
                                     needsGC(E->getLHS()->getType()),
                                     AggValueSlot::IsAliased,
                                     AggValueSlot::MayOverlap),
             Dest);
    return;
  }

  LValue LHS = CGF.EmitLValue(E->getLHS());

  // If we have an atomic type, evaluate into the destination and then
  // do an atomic copy.
  if (LHS.getType()->isAtomicType() ||
      CGF.LValueIsSuitableForInlineAtomic(LHS)) {
    EnsureDest(E->getRHS()->getType());
    Visit(E->getRHS());
    CGF.EmitAtomicStore(Dest.asRValue(), LHS, /*isInit*/ false);
    return;
  }

  // Codegen the RHS so that it stores directly into the LHS.
  AggValueSlot LHSSlot = AggValueSlot::forLValue(
      LHS, CGF, AggValueSlot::IsDestructed, needsGC(E->getLHS()->getType()),
      AggValueSlot::IsAliased, AggValueSlot::MayOverlap);
  // A non-volatile aggregate destination might have volatile member.
  if (!LHSSlot.isVolatile() &&
      CGF.hasVolatileMember(E->getLHS()->getType()))
    LHSSlot.setVolatile(true);

  CGF.EmitAggExpr(E->getRHS(), LHSSlot);

  // Copy into the destination if the assignment isn't ignored.
  EmitFinalDestCopy(E->getType(), LHS);

  // If the result is a used non-trivial C struct that nobody else will
  // destroy, push a destructor cleanup for the destination copy.
  if (!Dest.isIgnored() && !Dest.isExternallyDestructed() &&
      E->getType().isDestructedType() == QualType::DK_nontrivial_c_struct)
    CGF.pushDestroy(QualType::DK_nontrivial_c_struct, Dest.getAddress(),
                    E->getType());
}
12267330f729Sjoerg
/// Emit `cond ? trueExpr : falseExpr` for an aggregate result: both arms are
/// emitted into the same destination slot, guarded by a conditional branch.
void AggExprEmitter::
VisitAbstractConditionalOperator(const AbstractConditionalOperator *E) {
  llvm::BasicBlock *LHSBlock = CGF.createBasicBlock("cond.true");
  llvm::BasicBlock *RHSBlock = CGF.createBasicBlock("cond.false");
  llvm::BasicBlock *ContBlock = CGF.createBasicBlock("cond.end");

  // Bind the common expression if necessary.
  CodeGenFunction::OpaqueValueMapping binding(CGF, E);

  CodeGenFunction::ConditionalEvaluation eval(CGF);
  CGF.EmitBranchOnBoolExpr(E->getCond(), LHSBlock, RHSBlock,
                           CGF.getProfileCount(E));

  // Save whether the destination's lifetime is externally managed.
  bool isExternallyDestructed = Dest.isExternallyDestructed();
  // If the result is a non-trivial C struct with no external destructor,
  // we must push the destroy ourselves (after both arms run). Mark the slot
  // externally destructed so neither arm pushes its own cleanup.
  bool destructNonTrivialCStruct =
      !isExternallyDestructed &&
      E->getType().isDestructedType() == QualType::DK_nontrivial_c_struct;
  isExternallyDestructed |= destructNonTrivialCStruct;
  Dest.setExternallyDestructed(isExternallyDestructed);

  eval.begin(CGF);
  CGF.EmitBlock(LHSBlock);
  CGF.incrementProfileCounter(E);
  Visit(E->getTrueExpr());
  eval.end(CGF);

  assert(CGF.HaveInsertPoint() && "expression evaluation ended with no IP!");
  CGF.Builder.CreateBr(ContBlock);

  // If the result of an agg expression is unused, then the emission
  // of the LHS might need to create a destination slot.  That's fine
  // with us, and we can safely emit the RHS into the same slot, but
  // we shouldn't claim that it's already being destructed.
  Dest.setExternallyDestructed(isExternallyDestructed);

  eval.begin(CGF);
  CGF.EmitBlock(RHSBlock);
  Visit(E->getFalseExpr());
  eval.end(CGF);

  // Push the destroy we deferred above for non-trivial C structs.
  if (destructNonTrivialCStruct)
    CGF.pushDestroy(QualType::DK_nontrivial_c_struct, Dest.getAddress(),
                    E->getType());

  CGF.EmitBlock(ContBlock);
}
12747330f729Sjoerg
VisitChooseExpr(const ChooseExpr * CE)12757330f729Sjoerg void AggExprEmitter::VisitChooseExpr(const ChooseExpr *CE) {
12767330f729Sjoerg Visit(CE->getChosenSubExpr());
12777330f729Sjoerg }
12787330f729Sjoerg
VisitVAArgExpr(VAArgExpr * VE)12797330f729Sjoerg void AggExprEmitter::VisitVAArgExpr(VAArgExpr *VE) {
12807330f729Sjoerg Address ArgValue = Address::invalid();
12817330f729Sjoerg Address ArgPtr = CGF.EmitVAArg(VE, ArgValue);
12827330f729Sjoerg
12837330f729Sjoerg // If EmitVAArg fails, emit an error.
12847330f729Sjoerg if (!ArgPtr.isValid()) {
12857330f729Sjoerg CGF.ErrorUnsupported(VE, "aggregate va_arg expression");
12867330f729Sjoerg return;
12877330f729Sjoerg }
12887330f729Sjoerg
12897330f729Sjoerg EmitFinalDestCopy(VE->getType(), CGF.MakeAddrLValue(ArgPtr, VE->getType()));
12907330f729Sjoerg }
12917330f729Sjoerg
VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr * E)12927330f729Sjoerg void AggExprEmitter::VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E) {
12937330f729Sjoerg // Ensure that we have a slot, but if we already do, remember
12947330f729Sjoerg // whether it was externally destructed.
12957330f729Sjoerg bool wasExternallyDestructed = Dest.isExternallyDestructed();
12967330f729Sjoerg EnsureDest(E->getType());
12977330f729Sjoerg
12987330f729Sjoerg // We're going to push a destructor if there isn't already one.
12997330f729Sjoerg Dest.setExternallyDestructed();
13007330f729Sjoerg
13017330f729Sjoerg Visit(E->getSubExpr());
13027330f729Sjoerg
13037330f729Sjoerg // Push that destructor we promised.
13047330f729Sjoerg if (!wasExternallyDestructed)
13057330f729Sjoerg CGF.EmitCXXTemporary(E->getTemporary(), E->getType(), Dest.getAddress());
13067330f729Sjoerg }
13077330f729Sjoerg
13087330f729Sjoerg void
VisitCXXConstructExpr(const CXXConstructExpr * E)13097330f729Sjoerg AggExprEmitter::VisitCXXConstructExpr(const CXXConstructExpr *E) {
13107330f729Sjoerg AggValueSlot Slot = EnsureSlot(E->getType());
13117330f729Sjoerg CGF.EmitCXXConstructExpr(E, Slot);
13127330f729Sjoerg }
13137330f729Sjoerg
VisitCXXInheritedCtorInitExpr(const CXXInheritedCtorInitExpr * E)13147330f729Sjoerg void AggExprEmitter::VisitCXXInheritedCtorInitExpr(
13157330f729Sjoerg const CXXInheritedCtorInitExpr *E) {
13167330f729Sjoerg AggValueSlot Slot = EnsureSlot(E->getType());
13177330f729Sjoerg CGF.EmitInheritedCXXConstructorCall(
13187330f729Sjoerg E->getConstructor(), E->constructsVBase(), Slot.getAddress(),
13197330f729Sjoerg E->inheritedFromVBase(), E);
13207330f729Sjoerg }
13217330f729Sjoerg
/// Emit a lambda expression by initializing each field of the closure type
/// from the corresponding capture initializer, with EH cleanups so that
/// already-initialized captures are destroyed if a later initializer throws.
void
AggExprEmitter::VisitLambdaExpr(LambdaExpr *E) {
  AggValueSlot Slot = EnsureSlot(E->getType());
  LValue SlotLV = CGF.MakeAddrLValue(Slot.getAddress(), E->getType());

  // We'll need to enter cleanup scopes in case any of the element
  // initializers throws an exception.
  SmallVector<EHScopeStack::stable_iterator, 16> Cleanups;
  llvm::Instruction *CleanupDominator = nullptr;

  // Walk the closure type's fields in lockstep with the capture initializers.
  CXXRecordDecl::field_iterator CurField = E->getLambdaClass()->field_begin();
  for (LambdaExpr::const_capture_init_iterator i = E->capture_init_begin(),
                                               e = E->capture_init_end();
       i != e; ++i, ++CurField) {
    // Emit initialization
    LValue LV = CGF.EmitLValueForFieldInitialization(SlotLV, *CurField);
    if (CurField->hasCapturedVLAType()) {
      // A captured VLA type only stores the array bound; no destructor.
      CGF.EmitLambdaVLACapture(CurField->getCapturedVLAType(), LV);
      continue;
    }

    EmitInitializationToLValue(*i, LV);

    // Push a destructor if necessary.
    if (QualType::DestructionKind DtorKind =
            CurField->getType().isDestructedType()) {
      assert(LV.isSimple());
      if (CGF.needsEHCleanup(DtorKind)) {
        // Lazily create a placeholder instruction to dominate all the
        // cleanups; it is erased below after the cleanups are deactivated.
        if (!CleanupDominator)
          CleanupDominator = CGF.Builder.CreateAlignedLoad(
              CGF.Int8Ty,
              llvm::Constant::getNullValue(CGF.Int8PtrTy),
              CharUnits::One()); // placeholder

        CGF.pushDestroy(EHCleanup, LV.getAddress(CGF), CurField->getType(),
                        CGF.getDestroyer(DtorKind), false);
        Cleanups.push_back(CGF.EHStack.stable_begin());
      }
    }
  }

  // Deactivate all the partial cleanups in reverse order, which
  // generally means popping them.
  for (unsigned i = Cleanups.size(); i != 0; --i)
    CGF.DeactivateCleanupBlock(Cleanups[i-1], CleanupDominator);

  // Destroy the placeholder if we made one.
  if (CleanupDominator)
    CleanupDominator->eraseFromParent();
}
13727330f729Sjoerg
VisitExprWithCleanups(ExprWithCleanups * E)13737330f729Sjoerg void AggExprEmitter::VisitExprWithCleanups(ExprWithCleanups *E) {
13747330f729Sjoerg CodeGenFunction::RunCleanupsScope cleanups(CGF);
13757330f729Sjoerg Visit(E->getSubExpr());
13767330f729Sjoerg }
13777330f729Sjoerg
VisitCXXScalarValueInitExpr(CXXScalarValueInitExpr * E)13787330f729Sjoerg void AggExprEmitter::VisitCXXScalarValueInitExpr(CXXScalarValueInitExpr *E) {
13797330f729Sjoerg QualType T = E->getType();
13807330f729Sjoerg AggValueSlot Slot = EnsureSlot(T);
13817330f729Sjoerg EmitNullInitializationToLValue(CGF.MakeAddrLValue(Slot.getAddress(), T));
13827330f729Sjoerg }
13837330f729Sjoerg
VisitImplicitValueInitExpr(ImplicitValueInitExpr * E)13847330f729Sjoerg void AggExprEmitter::VisitImplicitValueInitExpr(ImplicitValueInitExpr *E) {
13857330f729Sjoerg QualType T = E->getType();
13867330f729Sjoerg AggValueSlot Slot = EnsureSlot(T);
13877330f729Sjoerg EmitNullInitializationToLValue(CGF.MakeAddrLValue(Slot.getAddress(), T));
13887330f729Sjoerg }
13897330f729Sjoerg
/// Determine whether the given cast kind is known to always convert values
/// with all zero bits in their value representation to values with all zero
/// bits in their value representation.
///
/// Used by isSimpleZero to strip casts off an initializer while preserving
/// the "stores all-zero bits" property. Any kind not provably
/// zero-preserving must return false (conservative).
static bool castPreservesZero(const CastExpr *CE) {
  switch (CE->getCastKind()) {
  // No-ops.
  case CK_NoOp:
  case CK_UserDefinedConversion:
  case CK_ConstructorConversion:
  case CK_BitCast:
  case CK_ToUnion:
  case CK_ToVoid:
  // Conversions between (possibly-complex) integral, (possibly-complex)
  // floating-point, and bool.
  case CK_BooleanToSignedIntegral:
  case CK_FloatingCast:
  case CK_FloatingComplexCast:
  case CK_FloatingComplexToBoolean:
  case CK_FloatingComplexToIntegralComplex:
  case CK_FloatingComplexToReal:
  case CK_FloatingRealToComplex:
  case CK_FloatingToBoolean:
  case CK_FloatingToIntegral:
  case CK_IntegralCast:
  case CK_IntegralComplexCast:
  case CK_IntegralComplexToBoolean:
  case CK_IntegralComplexToFloatingComplex:
  case CK_IntegralComplexToReal:
  case CK_IntegralRealToComplex:
  case CK_IntegralToBoolean:
  case CK_IntegralToFloating:
  // Reinterpreting integers as pointers and vice versa.
  case CK_IntegralToPointer:
  case CK_PointerToIntegral:
  // Language extensions.
  case CK_VectorSplat:
  case CK_MatrixCast:
  case CK_NonAtomicToAtomic:
  case CK_AtomicToNonAtomic:
    return true;

  // Member-pointer conversions: null member pointers are not all-zero bits
  // under every ABI (e.g. Itanium data member pointers use -1).
  case CK_BaseToDerivedMemberPointer:
  case CK_DerivedToBaseMemberPointer:
  case CK_MemberPointerToBoolean:
  case CK_NullToMemberPointer:
  case CK_ReinterpretMemberPointer:
    // FIXME: ABI-dependent.
    return false;

  case CK_AnyPointerToBlockPointerCast:
  case CK_BlockPointerToObjCPointerCast:
  case CK_CPointerToObjCPointerCast:
  case CK_ObjCObjectLValueCast:
  case CK_IntToOCLSampler:
  case CK_ZeroToOCLOpaqueType:
    // FIXME: Check these.
    return false;

  case CK_FixedPointCast:
  case CK_FixedPointToBoolean:
  case CK_FixedPointToFloating:
  case CK_FixedPointToIntegral:
  case CK_FloatingToFixedPoint:
  case CK_IntegralToFixedPoint:
    // FIXME: Do all fixed-point types represent zero as all 0 bits?
    return false;

  case CK_AddressSpaceConversion:
  case CK_BaseToDerived:
  case CK_DerivedToBase:
  case CK_Dynamic:
  case CK_NullToPointer:
  case CK_PointerToBoolean:
    // FIXME: Preserves zeroes only if zero pointers and null pointers have the
    // same representation in all involved address spaces.
    return false;

  // ARC ownership transfers, decay, and lvalue-involving casts change more
  // than the value representation; treat them as not zero-preserving.
  case CK_ARCConsumeObject:
  case CK_ARCExtendBlockObject:
  case CK_ARCProduceObject:
  case CK_ARCReclaimReturnedObject:
  case CK_CopyAndAutoreleaseBlockObject:
  case CK_ArrayToPointerDecay:
  case CK_FunctionToPointerDecay:
  case CK_BuiltinFnToFnPtr:
  case CK_Dependent:
  case CK_LValueBitCast:
  case CK_LValueToRValue:
  case CK_LValueToRValueBitCast:
  case CK_UncheckedDerivedToBase:
    return false;
  }
  llvm_unreachable("Unhandled clang::CastKind enum");
}
1484*e038c9c4Sjoerg
14857330f729Sjoerg /// isSimpleZero - If emitting this value will obviously just cause a store of
14867330f729Sjoerg /// zero to memory, return true. This can return false if uncertain, so it just
14877330f729Sjoerg /// handles simple cases.
isSimpleZero(const Expr * E,CodeGenFunction & CGF)14887330f729Sjoerg static bool isSimpleZero(const Expr *E, CodeGenFunction &CGF) {
14897330f729Sjoerg E = E->IgnoreParens();
1490*e038c9c4Sjoerg while (auto *CE = dyn_cast<CastExpr>(E)) {
1491*e038c9c4Sjoerg if (!castPreservesZero(CE))
1492*e038c9c4Sjoerg break;
1493*e038c9c4Sjoerg E = CE->getSubExpr()->IgnoreParens();
1494*e038c9c4Sjoerg }
14957330f729Sjoerg
14967330f729Sjoerg // 0
14977330f729Sjoerg if (const IntegerLiteral *IL = dyn_cast<IntegerLiteral>(E))
14987330f729Sjoerg return IL->getValue() == 0;
14997330f729Sjoerg // +0.0
15007330f729Sjoerg if (const FloatingLiteral *FL = dyn_cast<FloatingLiteral>(E))
15017330f729Sjoerg return FL->getValue().isPosZero();
15027330f729Sjoerg // int()
15037330f729Sjoerg if ((isa<ImplicitValueInitExpr>(E) || isa<CXXScalarValueInitExpr>(E)) &&
15047330f729Sjoerg CGF.getTypes().isZeroInitializable(E->getType()))
15057330f729Sjoerg return true;
15067330f729Sjoerg // (int*)0 - Null pointer expressions.
15077330f729Sjoerg if (const CastExpr *ICE = dyn_cast<CastExpr>(E))
15087330f729Sjoerg return ICE->getCastKind() == CK_NullToPointer &&
15097330f729Sjoerg CGF.getTypes().isPointerZeroInitializable(E->getType()) &&
15107330f729Sjoerg !E->HasSideEffects(CGF.getContext());
15117330f729Sjoerg // '\0'
15127330f729Sjoerg if (const CharacterLiteral *CL = dyn_cast<CharacterLiteral>(E))
15137330f729Sjoerg return CL->getValue() == 0;
15147330f729Sjoerg
15157330f729Sjoerg // Otherwise, hard case: conservatively return false.
15167330f729Sjoerg return false;
15177330f729Sjoerg }
15187330f729Sjoerg
15197330f729Sjoerg
/// Emit the expression E as the initializer of the lvalue LV, choosing the
/// emission strategy based on LV's evaluation kind (scalar, complex, or
/// aggregate) and skipping work that a pre-zeroed destination makes redundant.
void
AggExprEmitter::EmitInitializationToLValue(Expr *E, LValue LV) {
  QualType type = LV.getType();
  // FIXME: Ignore result?
  // FIXME: Are initializers affected by volatile?
  if (Dest.isZeroed() && isSimpleZero(E, CGF)) {
    // Storing "i32 0" to a zero'd memory location is a noop.
    return;
  } else if (isa<ImplicitValueInitExpr>(E) || isa<CXXScalarValueInitExpr>(E)) {
    // Value-initialization lowers to a null-initialization of the lvalue.
    return EmitNullInitializationToLValue(LV);
  } else if (isa<NoInitExpr>(E)) {
    // Do nothing.
    return;
  } else if (type->isReferenceType()) {
    // Reference members bind, rather than copy, the initializer.
    RValue RV = CGF.EmitReferenceBindingToExpr(E);
    return CGF.EmitStoreThroughLValue(RV, LV);
  }

  switch (CGF.getEvaluationKind(type)) {
  case TEK_Complex:
    CGF.EmitComplexExprIntoLValue(E, LV, /*isInit*/ true);
    return;
  case TEK_Aggregate:
    // Forward the zeroed-ness of our destination so the nested emission can
    // skip redundant zero stores too.
    CGF.EmitAggExpr(
        E, AggValueSlot::forLValue(LV, CGF, AggValueSlot::IsDestructed,
                                   AggValueSlot::DoesNotNeedGCBarriers,
                                   AggValueSlot::IsNotAliased,
                                   AggValueSlot::MayOverlap, Dest.isZeroed()));
    return;
  case TEK_Scalar:
    if (LV.isSimple()) {
      CGF.EmitScalarInit(E, /*D=*/nullptr, LV, /*Captured=*/false);
    } else {
      // Bitfield or other non-simple lvalue: go through the generic store.
      CGF.EmitStoreThroughLValue(RValue::get(CGF.EmitScalarExpr(E)), LV);
    }
    return;
  }
  llvm_unreachable("bad evaluation kind");
}
15597330f729Sjoerg
EmitNullInitializationToLValue(LValue lv)15607330f729Sjoerg void AggExprEmitter::EmitNullInitializationToLValue(LValue lv) {
15617330f729Sjoerg QualType type = lv.getType();
15627330f729Sjoerg
15637330f729Sjoerg // If the destination slot is already zeroed out before the aggregate is
15647330f729Sjoerg // copied into it, we don't have to emit any zeros here.
15657330f729Sjoerg if (Dest.isZeroed() && CGF.getTypes().isZeroInitializable(type))
15667330f729Sjoerg return;
15677330f729Sjoerg
15687330f729Sjoerg if (CGF.hasScalarEvaluationKind(type)) {
15697330f729Sjoerg // For non-aggregates, we can store the appropriate null constant.
15707330f729Sjoerg llvm::Value *null = CGF.CGM.EmitNullConstant(type);
15717330f729Sjoerg // Note that the following is not equivalent to
15727330f729Sjoerg // EmitStoreThroughBitfieldLValue for ARC types.
15737330f729Sjoerg if (lv.isBitField()) {
15747330f729Sjoerg CGF.EmitStoreThroughBitfieldLValue(RValue::get(null), lv);
15757330f729Sjoerg } else {
15767330f729Sjoerg assert(lv.isSimple());
15777330f729Sjoerg CGF.EmitStoreOfScalar(null, lv, /* isInitialization */ true);
15787330f729Sjoerg }
15797330f729Sjoerg } else {
15807330f729Sjoerg // There's a potential optimization opportunity in combining
15817330f729Sjoerg // memsets; that would be easy for arrays, but relatively
15827330f729Sjoerg // difficult for structures with the current code.
1583*e038c9c4Sjoerg CGF.EmitNullInitialization(lv.getAddress(CGF), lv.getType());
15847330f729Sjoerg }
15857330f729Sjoerg }
15867330f729Sjoerg
/// Emit an aggregate initializer list: base classes first, then either the
/// single active union member or each struct field in order, with EH cleanups
/// so that already-initialized parts are destroyed if a later initializer
/// throws.
void AggExprEmitter::VisitInitListExpr(InitListExpr *E) {
#if 0
  // FIXME: Assess perf here?  Figure out what cases are worth optimizing here
  // (Length of globals? Chunks of zeroed-out space?).
  //
  // If we can, prefer a copy from a global; this is a lot less code for long
  // globals, and it's easier for the current optimizers to analyze.
  if (llvm::Constant* C = CGF.CGM.EmitConstantExpr(E, E->getType(), &CGF)) {
    llvm::GlobalVariable* GV =
    new llvm::GlobalVariable(CGF.CGM.getModule(), C->getType(), true,
                             llvm::GlobalValue::InternalLinkage, C, "");
    EmitFinalDestCopy(E->getType(), CGF.MakeAddrLValue(GV, E->getType()));
    return;
  }
#endif
  if (E->hadArrayRangeDesignator())
    CGF.ErrorUnsupported(E, "GNU array range designator extension");

  // A transparent list is just a wrapper around its single initializer.
  if (E->isTransparent())
    return Visit(E->getInit(0));

  AggValueSlot Dest = EnsureSlot(E->getType());

  LValue DestLV = CGF.MakeAddrLValue(Dest.getAddress(), E->getType());

  // Handle initialization of an array.
  if (E->getType()->isArrayType()) {
    auto AType = cast<llvm::ArrayType>(Dest.getAddress().getElementType());
    EmitArrayInit(Dest.getAddress(), AType, E->getType(), E);
    return;
  }

  assert(E->getType()->isRecordType() && "Only support structs/unions here!");

  // Do struct initialization; this code just sets each individual member
  // to the approprate value.  This makes bitfield support automatic;
  // the disadvantage is that the generated code is more difficult for
  // the optimizer, especially with bitfields.
  unsigned NumInitElements = E->getNumInits();
  RecordDecl *record = E->getType()->castAs<RecordType>()->getDecl();

  // We'll need to enter cleanup scopes in case any of the element
  // initializers throws an exception.
  SmallVector<EHScopeStack::stable_iterator, 16> cleanups;
  llvm::Instruction *cleanupDominator = nullptr;
  // Registers a pushed cleanup, lazily creating the placeholder instruction
  // that dominates all of them (erased again at the end of this function).
  auto addCleanup = [&](const EHScopeStack::stable_iterator &cleanup) {
    cleanups.push_back(cleanup);
    if (!cleanupDominator) // create placeholder once needed
      cleanupDominator = CGF.Builder.CreateAlignedLoad(
          CGF.Int8Ty, llvm::Constant::getNullValue(CGF.Int8PtrTy),
          CharUnits::One());
  };

  unsigned curInitIndex = 0;

  // Emit initialization of base classes.
  if (auto *CXXRD = dyn_cast<CXXRecordDecl>(record)) {
    assert(E->getNumInits() >= CXXRD->getNumBases() &&
           "missing initializer for base class");
    for (auto &Base : CXXRD->bases()) {
      assert(!Base.isVirtual() && "should not see vbases here");
      auto *BaseRD = Base.getType()->getAsCXXRecordDecl();
      Address V = CGF.GetAddressOfDirectBaseInCompleteClass(
          Dest.getAddress(), CXXRD, BaseRD,
          /*isBaseVirtual*/ false);
      AggValueSlot AggSlot = AggValueSlot::forAddr(
          V, Qualifiers(),
          AggValueSlot::IsDestructed,
          AggValueSlot::DoesNotNeedGCBarriers,
          AggValueSlot::IsNotAliased,
          CGF.getOverlapForBaseInit(CXXRD, BaseRD, Base.isVirtual()));
      CGF.EmitAggExpr(E->getInit(curInitIndex++), AggSlot);

      // Arrange to destroy this base if a later initializer throws.
      if (QualType::DestructionKind dtorKind =
              Base.getType().isDestructedType()) {
        CGF.pushDestroy(dtorKind, V, Base.getType());
        addCleanup(CGF.EHStack.stable_begin());
      }
    }
  }

  // Prepare a 'this' for CXXDefaultInitExprs.
  CodeGenFunction::FieldConstructionScope FCS(CGF, Dest.getAddress());

  if (record->isUnion()) {
    // Only initialize one field of a union. The field itself is
    // specified by the initializer list.
    if (!E->getInitializedFieldInUnion()) {
      // Empty union; we have nothing to do.

#ifndef NDEBUG
      // Make sure that it's really an empty and not a failure of
      // semantic analysis.
      for (const auto *Field : record->fields())
        assert(Field->isUnnamedBitfield() && "Only unnamed bitfields allowed");
#endif
      return;
    }

    // FIXME: volatility
    FieldDecl *Field = E->getInitializedFieldInUnion();

    LValue FieldLoc = CGF.EmitLValueForFieldInitialization(DestLV, Field);
    if (NumInitElements) {
      // Store the initializer into the field
      EmitInitializationToLValue(E->getInit(0), FieldLoc);
    } else {
      // Default-initialize to null.
      EmitNullInitializationToLValue(FieldLoc);
    }

    return;
  }

  // Here we iterate over the fields; this makes it simpler to both
  // default-initialize fields and skip over unnamed fields.
  for (const auto *field : record->fields()) {
    // We're done once we hit the flexible array member.
    if (field->getType()->isIncompleteArrayType())
      break;

    // Always skip anonymous bitfields.
    if (field->isUnnamedBitfield())
      continue;

    // We're done if we reach the end of the explicit initializers, we
    // have a zeroed object, and the rest of the fields are
    // zero-initializable.
    if (curInitIndex == NumInitElements && Dest.isZeroed() &&
        CGF.getTypes().isZeroInitializable(E->getType()))
      break;


    LValue LV = CGF.EmitLValueForFieldInitialization(DestLV, field);
    // We never generate write-barries for initialized fields.
    LV.setNonGC(true);

    if (curInitIndex < NumInitElements) {
      // Store the initializer into the field.
      EmitInitializationToLValue(E->getInit(curInitIndex++), LV);
    } else {
      // We're out of initializers; default-initialize to null
      EmitNullInitializationToLValue(LV);
    }

    // Push a destructor if necessary.
    // FIXME: if we have an array of structures, all explicitly
    // initialized, we can end up pushing a linear number of cleanups.
    bool pushedCleanup = false;
    if (QualType::DestructionKind dtorKind
          = field->getType().isDestructedType()) {
      assert(LV.isSimple());
      if (CGF.needsEHCleanup(dtorKind)) {
        CGF.pushDestroy(EHCleanup, LV.getAddress(CGF), field->getType(),
                        CGF.getDestroyer(dtorKind), false);
        addCleanup(CGF.EHStack.stable_begin());
        pushedCleanup = true;
      }
    }

    // If the GEP didn't get used because of a dead zero init or something
    // else, clean it up for -O0 builds and general tidiness.
    if (!pushedCleanup && LV.isSimple())
      if (llvm::GetElementPtrInst *GEP =
              dyn_cast<llvm::GetElementPtrInst>(LV.getPointer(CGF)))
        if (GEP->use_empty())
          GEP->eraseFromParent();
  }

  // Deactivate all the partial cleanups in reverse order, which
  // generally means popping them.
  assert((cleanupDominator || cleanups.empty()) &&
         "Missing cleanupDominator before deactivating cleanup blocks");
  for (unsigned i = cleanups.size(); i != 0; --i)
    CGF.DeactivateCleanupBlock(cleanups[i-1], cleanupDominator);

  // Destroy the placeholder if we made one.
  if (cleanupDominator)
    cleanupDominator->eraseFromParent();
}
17677330f729Sjoerg
/// Emit an ArrayInitLoopExpr by lowering it to an explicit IR loop that
/// initializes each array element in turn.
///
/// \param outerBegin For a multidimensional initialization, the beginning of
///   the outermost array; used so that only a single partial-array destructor
///   cleanup is pushed for the whole nest instead of one per dimension. Null
///   when this is the outermost loop.
void AggExprEmitter::VisitArrayInitLoopExpr(const ArrayInitLoopExpr *E,
                                            llvm::Value *outerBegin) {
  // Emit the common subexpression.
  CodeGenFunction::OpaqueValueMapping binding(CGF, E->getCommonExpr());

  Address destPtr = EnsureSlot(E->getType()).getAddress();
  uint64_t numElements = E->getArraySize().getZExtValue();

  // A zero-length array needs no code at all.
  if (!numElements)
    return;

  // destPtr is an array*. Construct an elementType* by drilling down a level.
  llvm::Value *zero = llvm::ConstantInt::get(CGF.SizeTy, 0);
  llvm::Value *indices[] = {zero, zero};
  llvm::Value *begin = Builder.CreateInBoundsGEP(destPtr.getPointer(), indices,
                                                 "arrayinit.begin");

  // Prepare to special-case multidimensional array initialization: we avoid
  // emitting multiple destructor loops in that case.
  if (!outerBegin)
    outerBegin = begin;
  ArrayInitLoopExpr *InnerLoop = dyn_cast<ArrayInitLoopExpr>(E->getSubExpr());

  QualType elementType =
      CGF.getContext().getAsArrayType(E->getType())->getElementType();
  CharUnits elementSize = CGF.getContext().getTypeSizeInChars(elementType);
  CharUnits elementAlign =
      destPtr.getAlignment().alignmentOfArrayElement(elementSize);

  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *bodyBB = CGF.createBasicBlock("arrayinit.body");

  // Jump into the body.  The body is guaranteed to execute at least once
  // because numElements != 0, so the loop is bottom-tested.
  CGF.EmitBlock(bodyBB);
  // Loop counter: starts at 0 from the entry block; the second incoming value
  // (the incremented index) is added below once the latch block is known.
  llvm::PHINode *index =
      Builder.CreatePHI(zero->getType(), 2, "arrayinit.index");
  index->addIncoming(zero, entryBB);
  llvm::Value *element = Builder.CreateInBoundsGEP(begin, index);

  // Prepare for a cleanup: if an exception is thrown mid-loop, the elements
  // constructed so far must be destroyed.  For a multidimensional nest, only
  // the outermost loop pushes the cleanup (over [outerBegin, element)); inner
  // loops share it rather than pushing their own.
  QualType::DestructionKind dtorKind = elementType.isDestructedType();
  EHScopeStack::stable_iterator cleanup;
  if (CGF.needsEHCleanup(dtorKind) && !InnerLoop) {
    if (outerBegin->getType() != element->getType())
      outerBegin = Builder.CreateBitCast(outerBegin, element->getType());
    CGF.pushRegularPartialArrayCleanup(outerBegin, element, elementType,
                                       elementAlign,
                                       CGF.getDestroyer(dtorKind));
    cleanup = CGF.EHStack.stable_begin();
  } else {
    // No cleanup was pushed here; clear dtorKind so we don't try to
    // deactivate one after the loop.
    dtorKind = QualType::DK_none;
  }

  // Emit the actual filler expression.
  {
    // Temporaries created in an array initialization loop are destroyed
    // at the end of each iteration.
    CodeGenFunction::RunCleanupsScope CleanupsScope(CGF);
    CodeGenFunction::ArrayInitLoopExprScope Scope(CGF, index);
    LValue elementLV =
        CGF.MakeAddrLValue(Address(element, elementAlign), elementType);

    if (InnerLoop) {
      // If the subexpression is an ArrayInitLoopExpr, share its cleanup.
      auto elementSlot = AggValueSlot::forLValue(
          elementLV, CGF, AggValueSlot::IsDestructed,
          AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased,
          AggValueSlot::DoesNotOverlap);
      AggExprEmitter(CGF, elementSlot, false)
          .VisitArrayInitLoopExpr(InnerLoop, outerBegin);
    } else
      EmitInitializationToLValue(E->getSubExpr(), elementLV);
  }

  // Move on to the next element.  NUW add: index < numElements <= SIZE_MAX.
  llvm::Value *nextIndex = Builder.CreateNUWAdd(
      index, llvm::ConstantInt::get(CGF.SizeTy, 1), "arrayinit.next");
  // The filler emission may have changed the insert block, so take the
  // current block as the latch for the PHI's second incoming value.
  index->addIncoming(nextIndex, Builder.GetInsertBlock());

  // Leave the loop if we're done.
  llvm::Value *done = Builder.CreateICmpEQ(
      nextIndex, llvm::ConstantInt::get(CGF.SizeTy, numElements),
      "arrayinit.done");
  llvm::BasicBlock *endBB = CGF.createBasicBlock("arrayinit.end");
  Builder.CreateCondBr(done, endBB, bodyBB);

  CGF.EmitBlock(endBB);

  // Leave the partial-array cleanup if we entered one.  The final index value
  // serves as the dominating point for the deactivation.
  if (dtorKind)
    CGF.DeactivateCleanupBlock(cleanup, index);
}
18607330f729Sjoerg
VisitDesignatedInitUpdateExpr(DesignatedInitUpdateExpr * E)18617330f729Sjoerg void AggExprEmitter::VisitDesignatedInitUpdateExpr(DesignatedInitUpdateExpr *E) {
18627330f729Sjoerg AggValueSlot Dest = EnsureSlot(E->getType());
18637330f729Sjoerg
18647330f729Sjoerg LValue DestLV = CGF.MakeAddrLValue(Dest.getAddress(), E->getType());
18657330f729Sjoerg EmitInitializationToLValue(E->getBase(), DestLV);
18667330f729Sjoerg VisitInitListExpr(E->getUpdater());
18677330f729Sjoerg }
18687330f729Sjoerg
18697330f729Sjoerg //===----------------------------------------------------------------------===//
18707330f729Sjoerg // Entry Points into this File
18717330f729Sjoerg //===----------------------------------------------------------------------===//
18727330f729Sjoerg
18737330f729Sjoerg /// GetNumNonZeroBytesInInit - Get an approximate count of the number of
18747330f729Sjoerg /// non-zero bytes that will be stored when outputting the initializer for the
18757330f729Sjoerg /// specified initializer expression.
GetNumNonZeroBytesInInit(const Expr * E,CodeGenFunction & CGF)18767330f729Sjoerg static CharUnits GetNumNonZeroBytesInInit(const Expr *E, CodeGenFunction &CGF) {
1877*e038c9c4Sjoerg if (auto *MTE = dyn_cast<MaterializeTemporaryExpr>(E))
1878*e038c9c4Sjoerg E = MTE->getSubExpr();
1879*e038c9c4Sjoerg E = E->IgnoreParenNoopCasts(CGF.getContext());
18807330f729Sjoerg
18817330f729Sjoerg // 0 and 0.0 won't require any non-zero stores!
18827330f729Sjoerg if (isSimpleZero(E, CGF)) return CharUnits::Zero();
18837330f729Sjoerg
18847330f729Sjoerg // If this is an initlist expr, sum up the size of sizes of the (present)
18857330f729Sjoerg // elements. If this is something weird, assume the whole thing is non-zero.
18867330f729Sjoerg const InitListExpr *ILE = dyn_cast<InitListExpr>(E);
18877330f729Sjoerg while (ILE && ILE->isTransparent())
18887330f729Sjoerg ILE = dyn_cast<InitListExpr>(ILE->getInit(0));
18897330f729Sjoerg if (!ILE || !CGF.getTypes().isZeroInitializable(ILE->getType()))
18907330f729Sjoerg return CGF.getContext().getTypeSizeInChars(E->getType());
18917330f729Sjoerg
18927330f729Sjoerg // InitListExprs for structs have to be handled carefully. If there are
18937330f729Sjoerg // reference members, we need to consider the size of the reference, not the
18947330f729Sjoerg // referencee. InitListExprs for unions and arrays can't have references.
18957330f729Sjoerg if (const RecordType *RT = E->getType()->getAs<RecordType>()) {
18967330f729Sjoerg if (!RT->isUnionType()) {
18977330f729Sjoerg RecordDecl *SD = RT->getDecl();
18987330f729Sjoerg CharUnits NumNonZeroBytes = CharUnits::Zero();
18997330f729Sjoerg
19007330f729Sjoerg unsigned ILEElement = 0;
19017330f729Sjoerg if (auto *CXXRD = dyn_cast<CXXRecordDecl>(SD))
19027330f729Sjoerg while (ILEElement != CXXRD->getNumBases())
19037330f729Sjoerg NumNonZeroBytes +=
19047330f729Sjoerg GetNumNonZeroBytesInInit(ILE->getInit(ILEElement++), CGF);
19057330f729Sjoerg for (const auto *Field : SD->fields()) {
19067330f729Sjoerg // We're done once we hit the flexible array member or run out of
19077330f729Sjoerg // InitListExpr elements.
19087330f729Sjoerg if (Field->getType()->isIncompleteArrayType() ||
19097330f729Sjoerg ILEElement == ILE->getNumInits())
19107330f729Sjoerg break;
19117330f729Sjoerg if (Field->isUnnamedBitfield())
19127330f729Sjoerg continue;
19137330f729Sjoerg
19147330f729Sjoerg const Expr *E = ILE->getInit(ILEElement++);
19157330f729Sjoerg
19167330f729Sjoerg // Reference values are always non-null and have the width of a pointer.
19177330f729Sjoerg if (Field->getType()->isReferenceType())
19187330f729Sjoerg NumNonZeroBytes += CGF.getContext().toCharUnitsFromBits(
19197330f729Sjoerg CGF.getTarget().getPointerWidth(0));
19207330f729Sjoerg else
19217330f729Sjoerg NumNonZeroBytes += GetNumNonZeroBytesInInit(E, CGF);
19227330f729Sjoerg }
19237330f729Sjoerg
19247330f729Sjoerg return NumNonZeroBytes;
19257330f729Sjoerg }
19267330f729Sjoerg }
19277330f729Sjoerg
1928*e038c9c4Sjoerg // FIXME: This overestimates the number of non-zero bytes for bit-fields.
19297330f729Sjoerg CharUnits NumNonZeroBytes = CharUnits::Zero();
19307330f729Sjoerg for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i)
19317330f729Sjoerg NumNonZeroBytes += GetNumNonZeroBytesInInit(ILE->getInit(i), CGF);
19327330f729Sjoerg return NumNonZeroBytes;
19337330f729Sjoerg }
19347330f729Sjoerg
19357330f729Sjoerg /// CheckAggExprForMemSetUse - If the initializer is large and has a lot of
19367330f729Sjoerg /// zeros in it, emit a memset and avoid storing the individual zeros.
19377330f729Sjoerg ///
CheckAggExprForMemSetUse(AggValueSlot & Slot,const Expr * E,CodeGenFunction & CGF)19387330f729Sjoerg static void CheckAggExprForMemSetUse(AggValueSlot &Slot, const Expr *E,
19397330f729Sjoerg CodeGenFunction &CGF) {
19407330f729Sjoerg // If the slot is already known to be zeroed, nothing to do. Don't mess with
19417330f729Sjoerg // volatile stores.
19427330f729Sjoerg if (Slot.isZeroed() || Slot.isVolatile() || !Slot.getAddress().isValid())
19437330f729Sjoerg return;
19447330f729Sjoerg
19457330f729Sjoerg // C++ objects with a user-declared constructor don't need zero'ing.
19467330f729Sjoerg if (CGF.getLangOpts().CPlusPlus)
19477330f729Sjoerg if (const RecordType *RT = CGF.getContext()
19487330f729Sjoerg .getBaseElementType(E->getType())->getAs<RecordType>()) {
19497330f729Sjoerg const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
19507330f729Sjoerg if (RD->hasUserDeclaredConstructor())
19517330f729Sjoerg return;
19527330f729Sjoerg }
19537330f729Sjoerg
19547330f729Sjoerg // If the type is 16-bytes or smaller, prefer individual stores over memset.
19557330f729Sjoerg CharUnits Size = Slot.getPreferredSize(CGF.getContext(), E->getType());
19567330f729Sjoerg if (Size <= CharUnits::fromQuantity(16))
19577330f729Sjoerg return;
19587330f729Sjoerg
19597330f729Sjoerg // Check to see if over 3/4 of the initializer are known to be zero. If so,
19607330f729Sjoerg // we prefer to emit memset + individual stores for the rest.
19617330f729Sjoerg CharUnits NumNonZeroBytes = GetNumNonZeroBytesInInit(E, CGF);
19627330f729Sjoerg if (NumNonZeroBytes*4 > Size)
19637330f729Sjoerg return;
19647330f729Sjoerg
19657330f729Sjoerg // Okay, it seems like a good idea to use an initial memset, emit the call.
19667330f729Sjoerg llvm::Constant *SizeVal = CGF.Builder.getInt64(Size.getQuantity());
19677330f729Sjoerg
19687330f729Sjoerg Address Loc = Slot.getAddress();
19697330f729Sjoerg Loc = CGF.Builder.CreateElementBitCast(Loc, CGF.Int8Ty);
19707330f729Sjoerg CGF.Builder.CreateMemSet(Loc, CGF.Builder.getInt8(0), SizeVal, false);
19717330f729Sjoerg
19727330f729Sjoerg // Tell the AggExprEmitter that the slot is known zero.
19737330f729Sjoerg Slot.setZeroed();
19747330f729Sjoerg }
19757330f729Sjoerg
19767330f729Sjoerg
19777330f729Sjoerg
19787330f729Sjoerg
19797330f729Sjoerg /// EmitAggExpr - Emit the computation of the specified expression of aggregate
19807330f729Sjoerg /// type. The result is computed into DestPtr. Note that if DestPtr is null,
19817330f729Sjoerg /// the value of the aggregate expression is not needed. If VolatileDest is
19827330f729Sjoerg /// true, DestPtr cannot be 0.
EmitAggExpr(const Expr * E,AggValueSlot Slot)19837330f729Sjoerg void CodeGenFunction::EmitAggExpr(const Expr *E, AggValueSlot Slot) {
19847330f729Sjoerg assert(E && hasAggregateEvaluationKind(E->getType()) &&
19857330f729Sjoerg "Invalid aggregate expression to emit");
19867330f729Sjoerg assert((Slot.getAddress().isValid() || Slot.isIgnored()) &&
19877330f729Sjoerg "slot has bits but no address");
19887330f729Sjoerg
19897330f729Sjoerg // Optimize the slot if possible.
19907330f729Sjoerg CheckAggExprForMemSetUse(Slot, E, *this);
19917330f729Sjoerg
19927330f729Sjoerg AggExprEmitter(*this, Slot, Slot.isIgnored()).Visit(const_cast<Expr*>(E));
19937330f729Sjoerg }
19947330f729Sjoerg
EmitAggExprToLValue(const Expr * E)19957330f729Sjoerg LValue CodeGenFunction::EmitAggExprToLValue(const Expr *E) {
19967330f729Sjoerg assert(hasAggregateEvaluationKind(E->getType()) && "Invalid argument!");
19977330f729Sjoerg Address Temp = CreateMemTemp(E->getType());
19987330f729Sjoerg LValue LV = MakeAddrLValue(Temp, E->getType());
1999*e038c9c4Sjoerg EmitAggExpr(E, AggValueSlot::forLValue(
2000*e038c9c4Sjoerg LV, *this, AggValueSlot::IsNotDestructed,
20017330f729Sjoerg AggValueSlot::DoesNotNeedGCBarriers,
2002*e038c9c4Sjoerg AggValueSlot::IsNotAliased, AggValueSlot::DoesNotOverlap));
20037330f729Sjoerg return LV;
20047330f729Sjoerg }
20057330f729Sjoerg
20067330f729Sjoerg AggValueSlot::Overlap_t
getOverlapForFieldInit(const FieldDecl * FD)20077330f729Sjoerg CodeGenFunction::getOverlapForFieldInit(const FieldDecl *FD) {
20087330f729Sjoerg if (!FD->hasAttr<NoUniqueAddressAttr>() || !FD->getType()->isRecordType())
20097330f729Sjoerg return AggValueSlot::DoesNotOverlap;
20107330f729Sjoerg
20117330f729Sjoerg // If the field lies entirely within the enclosing class's nvsize, its tail
20127330f729Sjoerg // padding cannot overlap any already-initialized object. (The only subobjects
20137330f729Sjoerg // with greater addresses that might already be initialized are vbases.)
20147330f729Sjoerg const RecordDecl *ClassRD = FD->getParent();
20157330f729Sjoerg const ASTRecordLayout &Layout = getContext().getASTRecordLayout(ClassRD);
20167330f729Sjoerg if (Layout.getFieldOffset(FD->getFieldIndex()) +
20177330f729Sjoerg getContext().getTypeSize(FD->getType()) <=
20187330f729Sjoerg (uint64_t)getContext().toBits(Layout.getNonVirtualSize()))
20197330f729Sjoerg return AggValueSlot::DoesNotOverlap;
20207330f729Sjoerg
20217330f729Sjoerg // The tail padding may contain values we need to preserve.
20227330f729Sjoerg return AggValueSlot::MayOverlap;
20237330f729Sjoerg }
20247330f729Sjoerg
getOverlapForBaseInit(const CXXRecordDecl * RD,const CXXRecordDecl * BaseRD,bool IsVirtual)20257330f729Sjoerg AggValueSlot::Overlap_t CodeGenFunction::getOverlapForBaseInit(
20267330f729Sjoerg const CXXRecordDecl *RD, const CXXRecordDecl *BaseRD, bool IsVirtual) {
20277330f729Sjoerg // If the most-derived object is a field declared with [[no_unique_address]],
20287330f729Sjoerg // the tail padding of any virtual base could be reused for other subobjects
20297330f729Sjoerg // of that field's class.
20307330f729Sjoerg if (IsVirtual)
20317330f729Sjoerg return AggValueSlot::MayOverlap;
20327330f729Sjoerg
20337330f729Sjoerg // If the base class is laid out entirely within the nvsize of the derived
20347330f729Sjoerg // class, its tail padding cannot yet be initialized, so we can issue
20357330f729Sjoerg // stores at the full width of the base class.
20367330f729Sjoerg const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
20377330f729Sjoerg if (Layout.getBaseClassOffset(BaseRD) +
20387330f729Sjoerg getContext().getASTRecordLayout(BaseRD).getSize() <=
20397330f729Sjoerg Layout.getNonVirtualSize())
20407330f729Sjoerg return AggValueSlot::DoesNotOverlap;
20417330f729Sjoerg
20427330f729Sjoerg // The tail padding may contain values we need to preserve.
20437330f729Sjoerg return AggValueSlot::MayOverlap;
20447330f729Sjoerg }
20457330f729Sjoerg
/// Emit a copy of the aggregate at Src into Dest, normally as an llvm.memcpy.
///
/// \param Ty         The type being copied; must not be complex.
/// \param MayOverlap Whether Dest might be a potentially-overlapping subobject
///   (e.g. a [[no_unique_address]] field or base class) whose tail padding is
///   owned by something else; if so, only the data size is copied.
/// \param isVolatile True if either the source or destination is volatile.
void CodeGenFunction::EmitAggregateCopy(LValue Dest, LValue Src, QualType Ty,
                                        AggValueSlot::Overlap_t MayOverlap,
                                        bool isVolatile) {
  assert(!Ty->isAnyComplexType() && "Shouldn't happen for complex");

  Address DestPtr = Dest.getAddress(*this);
  Address SrcPtr = Src.getAddress(*this);

  if (getLangOpts().CPlusPlus) {
    if (const RecordType *RT = Ty->getAs<RecordType>()) {
      CXXRecordDecl *Record = cast<CXXRecordDecl>(RT->getDecl());
      // A memcpy is only a valid copy for types that are trivially
      // copyable/movable (or trivial-ABI, or unions, which have no
      // special members to run).
      assert((Record->hasTrivialCopyConstructor() ||
              Record->hasTrivialCopyAssignment() ||
              Record->hasTrivialMoveConstructor() ||
              Record->hasTrivialMoveAssignment() ||
              Record->hasAttr<TrivialABIAttr>() || Record->isUnion()) &&
             "Trying to aggregate-copy a type without a trivial copy/move "
             "constructor or assignment operator");
      // Ignore empty classes in C++.
      if (Record->isEmpty())
        return;
    }
  }

  // CUDA device-side builtin surface/texture types need a target-specific
  // copy; if the target hook handles it, we're done.
  if (getLangOpts().CUDAIsDevice) {
    if (Ty->isCUDADeviceBuiltinSurfaceType()) {
      if (getTargetHooks().emitCUDADeviceBuiltinSurfaceDeviceCopy(*this, Dest,
                                                                  Src))
        return;
    } else if (Ty->isCUDADeviceBuiltinTextureType()) {
      if (getTargetHooks().emitCUDADeviceBuiltinTextureDeviceCopy(*this, Dest,
                                                                  Src))
        return;
    }
  }

  // Aggregate assignment turns into llvm.memcpy.  This is almost valid per
  // C99 6.5.16.1p3, which states "If the value being stored in an object is
  // read from another object that overlaps in anyway the storage of the first
  // object, then the overlap shall be exact and the two objects shall have
  // qualified or unqualified versions of a compatible type."
  //
  // memcpy is not defined if the source and destination pointers are exactly
  // equal, but other compilers do this optimization, and almost every memcpy
  // implementation handles this case safely.  If there is a libc that does not
  // safely handle this, we can add a target hook.

  // Get data size info for this aggregate. Don't copy the tail padding if this
  // might be a potentially-overlapping subobject, since the tail padding might
  // be occupied by a different object. Otherwise, copying it is fine.
  TypeInfoChars TypeInfo;
  if (MayOverlap)
    TypeInfo = getContext().getTypeInfoDataSizeInChars(Ty);
  else
    TypeInfo = getContext().getTypeInfoInChars(Ty);

  llvm::Value *SizeVal = nullptr;
  if (TypeInfo.Width.isZero()) {
    // But note that getTypeInfo returns 0 for a VLA: compute the byte count
    // dynamically as (runtime element count) * (base element size).
    if (auto *VAT = dyn_cast_or_null<VariableArrayType>(
            getContext().getAsArrayType(Ty))) {
      QualType BaseEltTy;
      SizeVal = emitArrayLength(VAT, BaseEltTy, DestPtr);
      TypeInfo = getContext().getTypeInfoInChars(BaseEltTy);
      assert(!TypeInfo.Width.isZero());
      SizeVal = Builder.CreateNUWMul(
          SizeVal,
          llvm::ConstantInt::get(SizeTy, TypeInfo.Width.getQuantity()));
    }
  }
  if (!SizeVal) {
    SizeVal = llvm::ConstantInt::get(SizeTy, TypeInfo.Width.getQuantity());
  }

  // FIXME: If we have a volatile struct, the optimizer can remove what might
  // appear to be `extra' memory ops:
  //
  // volatile struct { int i; } a, b;
  //
  // int main() {
  //   a = b;
  //   a = b;
  // }
  //
  // we need to use a different call here.  We use isVolatile to indicate when
  // either the source or the destination is volatile.

  // memcpy operates on i8*; strip the element types.
  DestPtr = Builder.CreateElementBitCast(DestPtr, Int8Ty);
  SrcPtr = Builder.CreateElementBitCast(SrcPtr, Int8Ty);

  // Don't do any of the memmove_collectable tests if GC isn't set.
  if (CGM.getLangOpts().getGC() == LangOptions::NonGC) {
    // fall through
  } else if (const RecordType *RecordTy = Ty->getAs<RecordType>()) {
    // Under ObjC GC, records containing object members must be copied through
    // the runtime so write barriers are honored.
    RecordDecl *Record = RecordTy->getDecl();
    if (Record->hasObjectMember()) {
      CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this, DestPtr, SrcPtr,
                                                    SizeVal);
      return;
    }
  } else if (Ty->isArrayType()) {
    // Same for arrays whose base element type has object members.
    QualType BaseType = getContext().getBaseElementType(Ty);
    if (const RecordType *RecordTy = BaseType->getAs<RecordType>()) {
      if (RecordTy->getDecl()->hasObjectMember()) {
        CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this, DestPtr, SrcPtr,
                                                      SizeVal);
        return;
      }
    }
  }

  auto Inst = Builder.CreateMemCpy(DestPtr, SrcPtr, SizeVal, isVolatile);

  // Determine the metadata to describe the position of any padding in this
  // memcpy, as well as the TBAA tags for the members of the struct, in case
  // the optimizer wishes to expand it in to scalar memory operations.
  if (llvm::MDNode *TBAAStructTag = CGM.getTBAAStructInfo(Ty))
    Inst->setMetadata(llvm::LLVMContext::MD_tbaa_struct, TBAAStructTag);

  if (CGM.getCodeGenOpts().NewStructPathTBAA) {
    TBAAAccessInfo TBAAInfo = CGM.mergeTBAAInfoForMemoryTransfer(
        Dest.getTBAAInfo(), Src.getTBAAInfo());
    CGM.DecorateInstructionWithTBAA(Inst, TBAAInfo);
  }
}
2171