//===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains code dealing with the IR generation for cleanups
// and related information.
//
// A "cleanup" is a piece of code which needs to be executed whenever
// control transfers out of a particular scope.  This can be
// conditionalized to occur only on exceptional control flow, only on
// normal control flow, or both.
//
//===----------------------------------------------------------------------===//

#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace CodeGen;

bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
  if (rv.isScalar())
    return DominatingLLVMValue::needsSaving(rv.getScalarVal());
  if (rv.isAggregate())
    return DominatingValue<Address>::needsSaving(rv.getAggregateAddress());
  return true;
}

DominatingValue<RValue>::saved_type
DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
  if (rv.isScalar()) {
    llvm::Value *V = rv.getScalarVal();
    return saved_type(DominatingLLVMValue::save(CGF, V),
                      DominatingLLVMValue::needsSaving(V) ? ScalarAddress
                                                          : ScalarLiteral);
  }

  if (rv.isComplex()) {
    CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
    return saved_type(DominatingLLVMValue::save(CGF, V.first),
                      DominatingLLVMValue::save(CGF, V.second));
  }

  assert(rv.isAggregate());
  Address V = rv.getAggregateAddress();
  return saved_type(DominatingValue<Address>::save(CGF, V),
                    DominatingValue<Address>::needsSaving(V)
                        ? AggregateAddress
                        : AggregateLiteral);
}
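
// For illustration: saving is typically needed for values produced under
// conditional evaluation, e.g. the true arm of
//
//   cond ? make() : fallback   // 'make' is a placeholder, not a real helper
//
// whose IR does not dominate the join point where a deferred cleanup might
// use the value; save() spills such r-values so that restore() can reload
// them from a dominating alloca.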

/// Given a saved r-value produced by save(), emit the code necessary
/// to restore it to usability at the current insertion point.
RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
  switch (K) {
  case ScalarLiteral:
  case ScalarAddress:
    return RValue::get(DominatingLLVMValue::restore(CGF, Vals.first));
  case AggregateLiteral:
  case AggregateAddress:
    return RValue::getAggregate(
        DominatingValue<Address>::restore(CGF, AggregateAddr));
  case ComplexAddress: {
    llvm::Value *real = DominatingLLVMValue::restore(CGF, Vals.first);
    llvm::Value *imag = DominatingLLVMValue::restore(CGF, Vals.second);
    return RValue::getComplex(real, imag);
  }
  }

  llvm_unreachable("bad saved r-value kind");
}

/// Push an entry of the given size onto this protected-scope stack.
char *EHScopeStack::allocate(size_t Size) {
  Size = llvm::alignTo(Size, ScopeStackAlignment);
  if (!StartOfBuffer) {
    unsigned Capacity = 1024;
    while (Capacity < Size) Capacity *= 2;
    StartOfBuffer = new char[Capacity];
    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

    unsigned NewCapacity = CurrentCapacity;
    do {
      NewCapacity *= 2;
    } while (NewCapacity < UsedCapacity + Size);

    char *NewStartOfBuffer = new char[NewCapacity];
    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
    memcpy(NewStartOfData, StartOfData, UsedCapacity);
    delete [] StartOfBuffer;
    StartOfBuffer = NewStartOfBuffer;
    EndOfBuffer = NewEndOfBuffer;
    StartOfData = NewStartOfData;
  }

  assert(StartOfBuffer + Size <= StartOfData);
  StartOfData -= Size;
  return StartOfData;
}

void EHScopeStack::deallocate(size_t Size) {
  StartOfData += llvm::alignTo(Size, ScopeStackAlignment);
}
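
// Worked example (illustrative): the scope buffer is filled downward from
// EndOfBuffer, so the innermost scope always begins at StartOfData.  With the
// initial 1024-byte buffer, pushing a 40-byte scope (already a multiple of
// ScopeStackAlignment) moves StartOfData from offset 1024 to 984; deallocate()
// simply moves it back up.  On overflow, the capacity doubles and the used
// bytes are re-packed against the end of the new buffer.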

bool EHScopeStack::containsOnlyLifetimeMarkers(
    EHScopeStack::stable_iterator Old) const {
  for (EHScopeStack::iterator it = begin(); stabilize(it) != Old; it++) {
    EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*it);
    if (!cleanup || !cleanup->isLifetimeMarker())
      return false;
  }

  return true;
}

bool EHScopeStack::requiresLandingPad() const {
  for (stable_iterator si = getInnermostEHScope(); si != stable_end(); ) {
    // Skip lifetime markers.
    if (auto *cleanup = dyn_cast<EHCleanupScope>(&*find(si)))
      if (cleanup->isLifetimeMarker()) {
        si = cleanup->getEnclosingEHScope();
        continue;
      }
    return true;
  }

  return false;
}

EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
         si != se; ) {
    EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
    if (cleanup.isActive()) return si;
    si = cleanup.getEnclosingNormalCleanup();
  }
  return stable_end();
}

void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
  bool IsNormalCleanup = Kind & NormalCleanup;
  bool IsEHCleanup = Kind & EHCleanup;
  bool IsLifetimeMarker = Kind & LifetimeMarker;

  // Per C++ [except.terminate], it is implementation-defined whether none,
  // some, or all cleanups are called before std::terminate. Thus, when
  // terminate is the current EH scope, we may skip adding any EH cleanup
  // scopes.
  if (InnermostEHScope != stable_end() &&
      find(InnermostEHScope)->getKind() == EHScope::Terminate)
    IsEHCleanup = false;

  EHCleanupScope *Scope =
    new (Buffer) EHCleanupScope(IsNormalCleanup,
                                IsEHCleanup,
                                Size,
                                BranchFixups.size(),
                                InnermostNormalCleanup,
                                InnermostEHScope);
  if (IsNormalCleanup)
    InnermostNormalCleanup = stable_begin();
  if (IsEHCleanup)
    InnermostEHScope = stable_begin();
  if (IsLifetimeMarker)
    Scope->setLifetimeMarker();

  // With Windows -EHa, invoke llvm.seh.scope.begin() for EHCleanup.
  // If exceptions are disabled/ignored and SEH is not in use, then there is no
  // invoke destination.  SEH "works" even if exceptions are off. In practice,
  // this means that C++ destructors and other EH cleanups don't run, which is
  // consistent with MSVC's behavior, except in the presence of -EHa.
  // Check getInvokeDest() to generate llvm.seh.scope.begin() as needed.
  if (CGF->getLangOpts().EHAsynch && IsEHCleanup && !IsLifetimeMarker &&
      CGF->getTarget().getCXXABI().isMicrosoft() && CGF->getInvokeDest())
    CGF->EmitSehCppScopeBegin();

  return Scope->getCleanupBuffer();
}
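
// For illustration: callers usually reach pushCleanup() through the typed
// helper EHScopeStack::pushCleanup<T>(Kind, args...), which sizes the buffer
// for T and placement-news the cleanup object into it, e.g. (DestroyTemp is
// a hypothetical cleanup class):
//
//   CGF.EHStack.pushCleanup<DestroyTemp>(NormalAndEHCleanup, Addr);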
SEH "works" even if exceptions are off. In practice, 1835f757f3fSDimitry Andric // this means that C++ destructors and other EH cleanups don't run, which is 1845f757f3fSDimitry Andric // consistent with MSVC's behavior, except in the presence of -EHa. 1855f757f3fSDimitry Andric // Check getInvokeDest() to generate llvm.seh.scope.begin() as needed. 186fe6060f1SDimitry Andric if (CGF->getLangOpts().EHAsynch && IsEHCleanup && !IsLifetimeMarker && 1875f757f3fSDimitry Andric CGF->getTarget().getCXXABI().isMicrosoft() && CGF->getInvokeDest()) 188fe6060f1SDimitry Andric CGF->EmitSehCppScopeBegin(); 189fe6060f1SDimitry Andric 1900b57cec5SDimitry Andric return Scope->getCleanupBuffer(); 1910b57cec5SDimitry Andric } 1920b57cec5SDimitry Andric 1930b57cec5SDimitry Andric void EHScopeStack::popCleanup() { 1940b57cec5SDimitry Andric assert(!empty() && "popping exception stack when not empty"); 1950b57cec5SDimitry Andric 1960b57cec5SDimitry Andric assert(isa<EHCleanupScope>(*begin())); 1970b57cec5SDimitry Andric EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin()); 1980b57cec5SDimitry Andric InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup(); 1990b57cec5SDimitry Andric InnermostEHScope = Cleanup.getEnclosingEHScope(); 2000b57cec5SDimitry Andric deallocate(Cleanup.getAllocatedSize()); 2010b57cec5SDimitry Andric 2020b57cec5SDimitry Andric // Destroy the cleanup. 2030b57cec5SDimitry Andric Cleanup.Destroy(); 2040b57cec5SDimitry Andric 2050b57cec5SDimitry Andric // Check whether we can shrink the branch-fixups stack. 2060b57cec5SDimitry Andric if (!BranchFixups.empty()) { 2070b57cec5SDimitry Andric // If we no longer have any normal cleanups, all the fixups are 2080b57cec5SDimitry Andric // complete. 2090b57cec5SDimitry Andric if (!hasNormalCleanups()) 2100b57cec5SDimitry Andric BranchFixups.clear(); 2110b57cec5SDimitry Andric 2120b57cec5SDimitry Andric // Otherwise we can still trim out unnecessary nulls. 

/// Remove any 'null' fixups on the stack.  However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place.  We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup; otherwise there really shouldn't be any fixups.
  assert(hasNormalCleanups());

  EHScopeStack::iterator it = find(InnermostNormalCleanup);
  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

  while (BranchFixups.size() > MinSize &&
         BranchFixups.back().Destination == nullptr)
    BranchFixups.pop_back();
}
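
// For illustration: active flags implement conditional cleanups.  In
//
//   cond ? Temp().field : 0   // 'Temp' is a placeholder type
//
// the temporary's destructor must run at the end of the full-expression only
// if the true arm was actually evaluated, so its cleanup tests an i1 flag
// that is false before each evaluation and set true where the temporary is
// created.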

RawAddress CodeGenFunction::createCleanupActiveFlag() {
  // Create a variable to decide whether the cleanup needs to be run.
  RawAddress active = CreateTempAllocaWithoutCast(
      Builder.getInt1Ty(), CharUnits::One(), "cleanup.cond");

  // Initialize it to false at a site that's guaranteed to be run
  // before each evaluation.
  setBeforeOutermostConditional(Builder.getFalse(), active, *this);

  // Initialize it to true at the current location.
  Builder.CreateStore(Builder.getTrue(), active);

  return active;
}

void CodeGenFunction::initFullExprCleanupWithFlag(RawAddress ActiveFlag) {
  // Set that as the active flag in the cleanup.
  EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
  assert(!cleanup.hasActiveFlag() && "cleanup already has active flag?");
  cleanup.setActiveFlag(ActiveFlag);

  if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
  if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
}

void EHScopeStack::Cleanup::anchor() {}

static void createStoreInstBefore(llvm::Value *value, Address addr,
                                  llvm::Instruction *beforeInst,
                                  CodeGenFunction &CGF) {
  auto store = new llvm::StoreInst(value, addr.emitRawPointer(CGF), beforeInst);
  store->setAlignment(addr.getAlignment().getAsAlign());
}

static llvm::LoadInst *createLoadInstBefore(Address addr, const Twine &name,
                                            llvm::Instruction *beforeInst,
                                            CodeGenFunction &CGF) {
  return new llvm::LoadInst(addr.getElementType(), addr.emitRawPointer(CGF),
                            name, false, addr.getAlignment().getAsAlign(),
                            beforeInst);
}
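
// For illustration: normal control flow through a shared cleanup is routed
// via an integer "cleanup destination" slot.  Each branch stores a small
// index (0 means plain fallthrough) into cleanup.dest before entering the
// cleanup, and the cleanup's exit reloads the slot and switches on it to
// reach the real destination.  The helpers above let those stores and loads
// be inserted in front of branch instructions that were already emitted.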

/// All the branch fixups on the EH stack have propagated out past the
/// outermost normal cleanup; resolve them all by adding cases to the
/// given switch instruction.
static void ResolveAllBranchFixups(CodeGenFunction &CGF,
                                   llvm::SwitchInst *Switch,
                                   llvm::BasicBlock *CleanupEntry) {
  llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;

  for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination isn't set.
    BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
    if (Fixup.Destination == nullptr) continue;

    // If there isn't an OptimisticBranchBlock, then InitialBranch is
    // still pointing directly to its destination; forward it to the
    // appropriate cleanup entry.  This is required in the specific
    // case of
    //   { std::string s; goto lbl; }
    //   lbl:
    // i.e. where there's an unresolved fixup inside a single cleanup
    // entry which we're currently popping.
    if (Fixup.OptimisticBranchBlock == nullptr) {
      createStoreInstBefore(CGF.Builder.getInt32(Fixup.DestinationIndex),
                            CGF.getNormalCleanupDestSlot(), Fixup.InitialBranch,
                            CGF);
      Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
    }

    // Don't add this case to the switch statement twice.
    if (!CasesAdded.insert(Fixup.Destination).second)
      continue;

    Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
                    Fixup.Destination);
  }

  CGF.EHStack.clearFixups();
}

/// Transitions the terminator of the given exit-block of a cleanup to
/// be a cleanup switch.
static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
                                                   llvm::BasicBlock *Block) {
  // If it's a branch, turn it into a switch whose default
  // destination is its original target.
  llvm::Instruction *Term = Block->getTerminator();
  assert(Term && "can't transition block without terminator");

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional());
    auto Load = createLoadInstBefore(CGF.getNormalCleanupDestSlot(),
                                     "cleanup.dest", Term, CGF);
    llvm::SwitchInst *Switch =
      llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
    Br->eraseFromParent();
    return Switch;
  } else {
    return cast<llvm::SwitchInst>(Term);
  }
}

void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
  assert(Block && "resolving a null target block");
  if (!EHStack.getNumBranchFixups()) return;

  assert(EHStack.hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
  bool ResolvedAny = false;

  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination doesn't match.
    BranchFixup &Fixup = EHStack.getBranchFixup(I);
    if (Fixup.Destination != Block) continue;

    Fixup.Destination = nullptr;
    ResolvedAny = true;

    // If it doesn't have an optimistic branch block, InitialBranch is
    // already pointing to the right place.
    llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
    if (!BranchBB)
      continue;

    // Don't process the same optimistic branch block twice.
    if (!ModifiedOptimisticBlocks.insert(BranchBB).second)
      continue;

    llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);

    // Add a case to the switch.
    Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
  }

  if (ResolvedAny)
    EHStack.popNullFixups();
}
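
// For illustration: a fixup's destination becomes known only when its target
// is finally emitted (e.g. when the label of a forward goto is reached), at
// which point ResolveBranchFixups() is invoked for that block to patch the
// optimistic branches recorded along the way.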

/// Pops cleanup blocks until the given savepoint is reached.
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  assert(Old.isValid());

  bool HadBranches = false;
  while (EHStack.stable_begin() != Old) {
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
    HadBranches |= Scope.hasBranches();

    // As long as Old strictly encloses the scope's enclosing normal
    // cleanup, we're going to emit another normal cleanup which
    // fallthrough can propagate through.
    bool FallThroughIsBranchThrough =
      Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());

    PopCleanupBlock(FallThroughIsBranchThrough);
  }

  // If we didn't have any branches, the insertion point before cleanups must
  // dominate the current insertion point and we don't need to reload any
  // values.
  if (!HadBranches)
    return;

  // Spill and reload all values that the caller wants to be live at the
  // current insertion point.
  for (llvm::Value **ReloadedValue : ValuesToReload) {
    auto *Inst = dyn_cast_or_null<llvm::Instruction>(*ReloadedValue);
    if (!Inst)
      continue;

    // Don't spill static allocas; they dominate all cleanups.  These are
    // created by binding a reference to a local variable or temporary.
    auto *AI = dyn_cast<llvm::AllocaInst>(Inst);
    if (AI && AI->isStaticAlloca())
      continue;

    Address Tmp =
        CreateDefaultAlignTempAlloca(Inst->getType(), "tmp.exprcleanup");

    // Find an insertion point after Inst and spill it to the temporary.
    llvm::BasicBlock::iterator InsertBefore;
    if (auto *Invoke = dyn_cast<llvm::InvokeInst>(Inst))
      InsertBefore = Invoke->getNormalDest()->getFirstInsertionPt();
    else
      InsertBefore = std::next(Inst->getIterator());
    CGBuilderTy(CGM, &*InsertBefore).CreateStore(Inst, Tmp);

    // Reload the value at the current insertion point.
    *ReloadedValue = Builder.CreateLoad(Tmp);
  }
}
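
// For illustration: the overload below additionally flushes cleanups that
// were deferred because of lifetime extension, e.g.
//
//   const std::string &r = makeString();   // 'makeString' is a placeholder
//
// where the temporary's destructor must be delayed to the end of the
// reference's scope rather than the end of the full-expression.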

/// Pops cleanup blocks until the given savepoint is reached, then adds onto
/// the EH stack any cleanups recorded since that savepoint on the
/// lifetime-extended cleanups stack.
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old, size_t OldLifetimeExtendedSize,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  PopCleanupBlocks(Old, ValuesToReload);

  // Move our deferred cleanups onto the EH stack.
  for (size_t I = OldLifetimeExtendedSize,
              E = LifetimeExtendedCleanupStack.size(); I != E; /**/) {
    // Alignment should be guaranteed by the vptrs in the individual cleanups.
    assert((I % alignof(LifetimeExtendedCleanupHeader) == 0) &&
           "misaligned cleanup stack entry");

    LifetimeExtendedCleanupHeader &Header =
        reinterpret_cast<LifetimeExtendedCleanupHeader&>(
            LifetimeExtendedCleanupStack[I]);
    I += sizeof(Header);

    EHStack.pushCopyOfCleanup(Header.getKind(),
                              &LifetimeExtendedCleanupStack[I],
                              Header.getSize());
    I += Header.getSize();

    if (Header.isConditional()) {
      RawAddress ActiveFlag =
          reinterpret_cast<RawAddress &>(LifetimeExtendedCleanupStack[I]);
      initFullExprCleanupWithFlag(ActiveFlag);
      I += sizeof(ActiveFlag);
    }
  }
  LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
}

static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
                                           EHCleanupScope &Scope) {
  assert(Scope.isNormalCleanup());
  llvm::BasicBlock *Entry = Scope.getNormalBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("cleanup");
    Scope.setNormalBlock(Entry);
  }
  return Entry;
}

/// Attempts to reduce a cleanup's entry block to a fallthrough.  This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup blocks.
///
/// Returns the new block, whatever it is.
static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
                                              llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return Entry;

  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return Entry;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
  // block, we'll need to continue inserting at the end of the
  // predecessor.
  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

  // Kill the branch.
  Br->eraseFromParent();

  // Replace all uses of the entry with the predecessor, in case there
  // are phis in the cleanup.
  Entry->replaceAllUsesWith(Pred);

  // Merge the blocks.
  Pred->splice(Pred->end(), Entry);

  // Kill the entry block.
  Entry->eraseFromParent();

  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);

  return Pred;
}
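
// For illustration: a flag-guarded cleanup emitted by EmitCleanup() below
// looks roughly like
//
//   %cleanup.is_active = load i1, ptr %cleanup.cond
//   br i1 %cleanup.is_active, label %cleanup.action, label %cleanup.done
//
// with the cleanup body in %cleanup.action.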

static void EmitCleanup(CodeGenFunction &CGF,
                        EHScopeStack::Cleanup *Fn,
                        EHScopeStack::Cleanup::Flags flags,
                        Address ActiveFlag) {
  // If there's an active flag, load it and skip the cleanup if it's
  // false.
  llvm::BasicBlock *ContBB = nullptr;
  if (ActiveFlag.isValid()) {
    ContBB = CGF.createBasicBlock("cleanup.done");
    llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
    llvm::Value *IsActive
      = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
    CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
    CGF.EmitBlock(CleanupBB);
  }

  // Ask the cleanup to emit itself.
  Fn->Emit(CGF, flags);
  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");

  // Emit the continuation block if there was an active flag.
  if (ActiveFlag.isValid())
    CGF.EmitBlock(ContBB);
}

static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
                                          llvm::BasicBlock *From,
                                          llvm::BasicBlock *To) {
  // Exit is the exit block of a cleanup, so it always terminates in
  // an unconditional branch or a switch.
  llvm::Instruction *Term = Exit->getTerminator();

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
    Br->setSuccessor(0, To);
  } else {
    llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
    for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
      if (Switch->getSuccessor(I) == From)
        Switch->setSuccessor(I, To);
  }
}

/// We don't need a normal entry block for the given cleanup.
/// Optimistic fixup branches can cause these blocks to come into
/// existence anyway; if so, destroy it.
///
/// The validity of this transformation is very much specific to the
/// exact ways in which we form branches to cleanup entries.
static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
                                         EHCleanupScope &scope) {
  llvm::BasicBlock *entry = scope.getNormalBlock();
  if (!entry) return;

  // Replace all the uses with unreachable.
  llvm::BasicBlock *unreachableBB = CGF.getUnreachableBlock();
  for (llvm::BasicBlock::use_iterator
         i = entry->use_begin(), e = entry->use_end(); i != e; ) {
    llvm::Use &use = *i;
    ++i;

    use.set(unreachableBB);

    // The only uses should be fixup switches.
    llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
    if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
      // Replace the switch with a branch.
      llvm::BranchInst::Create(si->case_begin()->getCaseSuccessor(), si);

      // The switch operand is a load from the cleanup-dest alloca.
      llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());

      // Destroy the switch.
      si->eraseFromParent();

      // Destroy the load.
      assert(condition->getOperand(0) == CGF.NormalCleanupDest.getPointer());
      assert(condition->use_empty());
      condition->eraseFromParent();
    }
  }

  assert(entry->use_empty());
  delete entry;
}
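
// Terminology used below (illustrative summary): a "branch-after" is a branch
// to a destination that only has to run this cleanup, so it can be resolved
// here; a "branch-through" must continue into the enclosing normal cleanup
// after this one runs, e.g. a return nested inside two scopes with
// destructors passes through the inner cleanup to reach the outer one.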

/// Pops a cleanup block.  If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough,
                                      bool ForDeactivation) {
  assert(!EHStack.empty() && "cleanup stack is empty!");
  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());

  // If we are deactivating a normal cleanup, we need to pretend that the
  // fallthrough is unreachable. We restore this IP before returning.
  CGBuilderTy::InsertPoint NormalDeactivateOrigIP;
  if (ForDeactivation && (Scope.isNormalCleanup() || !getLangOpts().EHAsynch)) {
    NormalDeactivateOrigIP = Builder.saveAndClearIP();
  }
  // Remember activation information.
  bool IsActive = Scope.isActive();
  Address NormalActiveFlag =
    Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag()
                                          : Address::invalid();
  Address EHActiveFlag =
    Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag()
                                      : Address::invalid();

  // Check whether we need an EH cleanup.  This is only true if we've
  // generated a lazy EH cleanup block.
  llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
  assert(Scope.hasEHBranches() == (EHEntry != nullptr));
  bool RequiresEHCleanup = (EHEntry != nullptr);
  EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope();

  // Check the three conditions which might require a normal cleanup:

  // - whether there are branch fix-ups through this cleanup
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

  // - whether there are branch-throughs or branch-afters
  bool HasExistingBranches = Scope.hasBranches();

  // - whether there's a fallthrough
  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
  bool HasFallthrough =
      FallthroughSource != nullptr && (IsActive || HasExistingBranches);

  // Branch-through fall-throughs leave the insertion point set to the
  // end of the last cleanup, which points to the current scope.  The
  // rest of IR gen doesn't need to worry about this; it only happens
  // during the execution of PopCleanupBlocks().
  bool HasPrebranchedFallthrough =
    (FallthroughSource && FallthroughSource->getTerminator());

  // If this is a normal cleanup, then having a prebranched
  // fallthrough implies that the fallthrough source unconditionally
  // jumps here.
  assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
         (Scope.getNormalBlock() &&
          FallthroughSource->getTerminator()->getSuccessor(0)
            == Scope.getNormalBlock()));

  bool RequiresNormalCleanup = false;
  if (Scope.isNormalCleanup() &&
      (HasFixups || HasExistingBranches || HasFallthrough)) {
    RequiresNormalCleanup = true;
  }

  // If we have a prebranched fallthrough into an inactive normal
  // cleanup, rewrite it so that it leads to the appropriate place.
  if (Scope.isNormalCleanup() && HasPrebranchedFallthrough &&
      !RequiresNormalCleanup) {
    // FIXME: Come up with a program which would need forwarding prebranched
    // fallthrough and add tests. Otherwise delete this and assert against it.
    assert(!IsActive);
    llvm::BasicBlock *prebranchDest;

    // If the prebranch is semantically branching through the next
    // cleanup, just forward it to the next block, leaving the
    // insertion point in the prebranched block.
    if (FallthroughIsBranchThrough) {
      EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
      prebranchDest = CreateNormalEntry(*this, cast<EHCleanupScope>(enclosing));

    // Otherwise, we need to make a new block.  If the normal cleanup
    // isn't being used at all, we could actually reuse the normal
    // entry block, but this is simpler, and it avoids conflicts with
    // dead optimistic fixup branches.
    } else {
      prebranchDest = createBasicBlock("forwarded-prebranch");
      EmitBlock(prebranchDest);
    }

    llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
    assert(normalEntry && !normalEntry->use_empty());

    ForwardPrebranchedFallthrough(FallthroughSource,
                                  normalEntry, prebranchDest);
  }

  // If we don't need the cleanup at all, we're done.
  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup(); // safe because there are no fixups
    assert(EHStack.getNumBranchFixups() == 0 ||
           EHStack.hasNormalCleanups());
    if (NormalDeactivateOrigIP.isSet())
      Builder.restoreIP(NormalDeactivateOrigIP);
    return;
  }

  // Copy the cleanup emission data out.  This uses either a stack
  // array or malloc'd memory, depending on the size, which is
  // behavior that SmallVector would provide, if we could use it
  // here.  Unfortunately, if you ask for a SmallVector<char>, the
  // alignment isn't sufficient.
  auto *CleanupSource = reinterpret_cast<char *>(Scope.getCleanupBuffer());
  alignas(EHScopeStack::ScopeStackAlignment) char
      CleanupBufferStack[8 * sizeof(void *)];
  std::unique_ptr<char[]> CleanupBufferHeap;
  size_t CleanupSize = Scope.getCleanupSize();
  EHScopeStack::Cleanup *Fn;

  if (CleanupSize <= sizeof(CleanupBufferStack)) {
    memcpy(CleanupBufferStack, CleanupSource, CleanupSize);
    Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferStack);
  } else {
    CleanupBufferHeap.reset(new char[CleanupSize]);
    memcpy(CleanupBufferHeap.get(), CleanupSource, CleanupSize);
    Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferHeap.get());
  }

  EHScopeStack::Cleanup::Flags cleanupFlags;
  if (Scope.isNormalCleanup())
    cleanupFlags.setIsNormalCleanupKind();
  if (Scope.isEHCleanup())
    cleanupFlags.setIsEHCleanupKind();

  // Under -EHa, invoke seh.scope.end() to mark scope end before dtor
  bool IsEHa = getLangOpts().EHAsynch && !Scope.isLifetimeMarker();
  const EHPersonality &Personality = EHPersonality::get(*this);
  if (!RequiresNormalCleanup) {
    // Mark the C++ scope end for a passed-by-value argument temporary,
    // which the Windows ABI normally cleans up in the callee.
    if (IsEHa && getInvokeDest()) {
      // If we are deactivating a normal cleanup then we don't have a
      // fallthrough. Restore original IP to emit CPP scope ends in the correct
      // block.
      if (NormalDeactivateOrigIP.isSet())
        Builder.restoreIP(NormalDeactivateOrigIP);
      if (Personality.isMSVCXXPersonality() && Builder.GetInsertBlock())
        EmitSehCppScopeEnd();
      if (NormalDeactivateOrigIP.isSet())
        NormalDeactivateOrigIP = Builder.saveAndClearIP();
    }
    destroyOptimisticNormalEntry(*this, Scope);
    Scope.MarkEmitted();
    EHStack.popCleanup();
  } else {
    // If we have a fallthrough and no other need for the cleanup,
    // emit it directly.
    if (HasFallthrough && !HasPrebranchedFallthrough && !HasFixups &&
        !HasExistingBranches) {

      // mark SEH scope end for fall-through flow
      if (IsEHa && getInvokeDest()) {
        if (Personality.isMSVCXXPersonality())
          EmitSehCppScopeEnd();
        else
          EmitSehTryScopeEnd();
      }

      destroyOptimisticNormalEntry(*this, Scope);
      Scope.MarkEmitted();
      EHStack.popCleanup();

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

    // Otherwise, the best approach is to thread everything through
    // the cleanup block and then try to clean up after ourselves.
    } else {
      // Force the entry block to exist.
      llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);

      // I.  Set up the fallthrough edge in.

      CGBuilderTy::InsertPoint savedInactiveFallthroughIP;

      // If there's a fallthrough, we need to store the cleanup
      // destination index.  For fall-throughs this is always zero.
      if (HasFallthrough) {
        if (!HasPrebranchedFallthrough)
          Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());

      // Otherwise, save and clear the IP if we don't have fallthrough
      // because the cleanup is inactive.
      } else if (FallthroughSource) {
        assert(!IsActive && "source without fallthrough for active cleanup");
        savedInactiveFallthroughIP = Builder.saveAndClearIP();
      }

      // II.  Emit the entry block.  This implicitly branches to it if
      // we have fallthrough.  All the fixups and existing branches
      // should already be branched to it.
      EmitBlock(NormalEntry);

      // intercept normal cleanup to mark SEH scope end
      if (IsEHa && getInvokeDest()) {
        if (Personality.isMSVCXXPersonality())
          EmitSehCppScopeEnd();
        else
          EmitSehTryScopeEnd();
      }

      // III.  Figure out where we're going and build the cleanup
      // epilogue.
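
      // For illustration: the epilogue chosen below routes every way of
      // leaving the cleanup.  Roughly: a single branch-after with no other
      // users gets a direct branch; multiple destinations get a switch on
      // the cleanup.dest slot; anything remaining branches through to the
      // enclosing cleanup's entry.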

      bool HasEnclosingCleanups =
        (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());

      // Compute the branch-through dest if we need it:
      //   - if there are branch-throughs threaded through the scope
      //   - if fall-through is a branch-through
      //   - if there are fixups that will be optimistically forwarded
      //     to the enclosing cleanup
      llvm::BasicBlock *BranchThroughDest = nullptr;
      if (Scope.hasBranchThroughs() ||
          (FallthroughSource && FallthroughIsBranchThrough) ||
          (HasFixups && HasEnclosingCleanups)) {
        assert(HasEnclosingCleanups);
        EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
        BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
      }

      llvm::BasicBlock *FallthroughDest = nullptr;
      SmallVector<llvm::Instruction*, 2> InstsToAppend;

      // If there's exactly one branch-after and no other threads,
      // we can route it without a switch.
      // Skip for SEH, since ExitSwitch is used to generate code to indicate
      // abnormal termination. (SEH: Except _leave and fall-through at
      // the end, all other exits in a _try (return/goto/continue/break)
      // are considered abnormal terminations, using NormalCleanupDestSlot
      // to indicate abnormal termination)
      if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
          !currentFunctionUsesSEHTry() && Scope.getNumBranchAfters() == 1) {
        assert(!BranchThroughDest || !IsActive);

        // Clean up the possibly dead store to the cleanup dest slot.
        llvm::Instruction *NormalCleanupDestSlot =
            cast<llvm::Instruction>(getNormalCleanupDestSlot().getPointer());
        if (NormalCleanupDestSlot->hasOneUse()) {
          NormalCleanupDestSlot->user_back()->eraseFromParent();
          NormalCleanupDestSlot->eraseFromParent();
          NormalCleanupDest = RawAddress::invalid();
        }

        llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));

      // Build a switch-out if we need it:
      //   - if there are branch-afters threaded through the scope
      //   - if fall-through is a branch-after
      //   - if there are fixups that have nowhere left to go and
      //     so must be immediately resolved
      } else if (Scope.getNumBranchAfters() ||
                 (HasFallthrough && !FallthroughIsBranchThrough) ||
                 (HasFixups && !HasEnclosingCleanups)) {

        llvm::BasicBlock *Default =
          (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());

        // TODO: base this on the number of branch-afters and fixups
        const unsigned SwitchCapacity = 10;

        // pass the abnormal exit flag to Fn (SEH cleanup)
        cleanupFlags.setHasExitSwitch();

        llvm::LoadInst *Load = createLoadInstBefore(
            getNormalCleanupDestSlot(), "cleanup.dest", nullptr, *this);
        llvm::SwitchInst *Switch =
          llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

        InstsToAppend.push_back(Load);
        InstsToAppend.push_back(Switch);

        // Branch-after fallthrough.
        if (FallthroughSource && !FallthroughIsBranchThrough) {
          FallthroughDest = createBasicBlock("cleanup.cont");
          if (HasFallthrough)
            Switch->addCase(Builder.getInt32(0), FallthroughDest);
        }

        for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
          Switch->addCase(Scope.getBranchAfterIndex(I),
                          Scope.getBranchAfterBlock(I));
        }

        // If there aren't any enclosing cleanups, we can resolve all
        // the fixups now.
        if (HasFixups && !HasEnclosingCleanups)
          ResolveAllBranchFixups(*this, Switch, NormalEntry);
      } else {
        // We should always have a branch-through destination in this case.
        assert(BranchThroughDest);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
      }

      // IV. Pop the cleanup and emit it.
      Scope.MarkEmitted();
      EHStack.popCleanup();
      assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

      // Append the prepared cleanup prologue from above.
      llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
      for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
        InstsToAppend[I]->insertInto(NormalExit, NormalExit->end());

      // Optimistically hope that any fixups will continue falling through.
      for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
           I < E; ++I) {
        BranchFixup &Fixup = EHStack.getBranchFixup(I);
        if (!Fixup.Destination) continue;
        if (!Fixup.OptimisticBranchBlock) {
          createStoreInstBefore(Builder.getInt32(Fixup.DestinationIndex),
                                getNormalCleanupDestSlot(),
                                Fixup.InitialBranch, *this);
          Fixup.InitialBranch->setSuccessor(0, NormalEntry);
        }
        Fixup.OptimisticBranchBlock = NormalExit;
      }

      // V. Set up the fallthrough edge out.

      // Case 1: a fallthrough source exists but doesn't branch to the
      // cleanup because the cleanup is inactive.
      if (!HasFallthrough && FallthroughSource) {
        // Prebranched fallthrough was forwarded earlier.
        // Non-prebranched fallthrough doesn't need to be forwarded.
        // Either way, all we need to do is restore the IP we cleared before.
        assert(!IsActive);
        Builder.restoreIP(savedInactiveFallthroughIP);

      // Case 2: a fallthrough source exists and should branch to the
      // cleanup, but we're not supposed to branch through to the next
      // cleanup.
      } else if (HasFallthrough && FallthroughDest) {
        assert(!FallthroughIsBranchThrough);
        EmitBlock(FallthroughDest);

      // Case 3: a fallthrough source exists and should branch to the
      // cleanup and then through to the next.
      } else if (HasFallthrough) {
        // Everything is already set up for this.

      // Case 4: no fallthrough source exists.
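      // (That is, there was no insertion point when the cleanup was popped:
      // the preceding code ended in a return, goto, or other terminator, so
      // the cleanup block is reached only through the exits threaded above.)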
      } else {
        Builder.ClearInsertionPoint();
      }

      // VI. Assorted cleaning.

      // Check whether we can merge NormalEntry into a single predecessor.
      // This might invalidate (non-IR) pointers to NormalEntry.
      llvm::BasicBlock *NewNormalEntry =
        SimplifyCleanupEntry(*this, NormalEntry);

      // If it did invalidate those pointers, and NormalEntry was the same
      // as NormalExit, go back and patch up the fixups.
      if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
        for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
             I < E; ++I)
          EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
    }
  }

  if (NormalDeactivateOrigIP.isSet())
    Builder.restoreIP(NormalDeactivateOrigIP);
  assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);

  // Emit the EH cleanup if required.
  if (RequiresEHCleanup) {
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

    EmitBlock(EHEntry);

    llvm::BasicBlock *NextAction = getEHDispatchBlock(EHParent);

    // Push a terminate scope or cleanupendpad scope around the potentially
    // throwing cleanups. For funclet EH personalities, the cleanupendpad
    // models program termination when cleanups throw.
    bool PushedTerminate = false;
    SaveAndRestore RestoreCurrentFuncletPad(CurrentFuncletPad);
    llvm::CleanupPadInst *CPI = nullptr;

    const EHPersonality &Personality = EHPersonality::get(*this);
    if (Personality.usesFuncletPads()) {
      llvm::Value *ParentPad = CurrentFuncletPad;
      if (!ParentPad)
        ParentPad = llvm::ConstantTokenNone::get(CGM.getLLVMContext());
      CurrentFuncletPad = CPI = Builder.CreateCleanupPad(ParentPad);
    }

    // Non-MSVC personalities need to terminate when an EH cleanup throws.
    if (!Personality.isMSVCPersonality()) {
      EHStack.pushTerminate();
      PushedTerminate = true;
    } else if (IsEHa && getInvokeDest()) {
      EmitSehCppScopeEnd();
    }

    // We only actually emit the cleanup code if the cleanup is either
    // active or was used before it was deactivated.
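    // (A cleanup that was deactivated before ever being used as an EH
    // cleanup has no active flag and IsActive is false; its body can be
    // skipped entirely, though the dispatch edge to NextAction below still
    // has to be wired up.)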
    if (EHActiveFlag.isValid() || IsActive) {
      cleanupFlags.setIsForEHCleanup();
      EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
    }

    if (CPI)
      Builder.CreateCleanupRet(CPI, NextAction);
    else
      Builder.CreateBr(NextAction);

    // Leave the terminate scope.
    if (PushedTerminate)
      EHStack.popTerminate();

    Builder.restoreIP(SavedIP);

    SimplifyCleanupEntry(*this, EHEntry);
  }
}

/// isObviouslyBranchWithoutCleanups - Return true if a branch to the
/// specified destination obviously has no cleanups to run.  'false' is always
/// a conservatively correct answer for this method.
bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator TopCleanup =
    EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
    return true;

  // Otherwise, we might need some cleanups.
  return false;
}


/// Terminate the current block by emitting a branch which might leave
/// the current cleanup-protected scope.  The target scope may not yet
/// be known, in which case this will require a fixup.
///
/// As a side-effect, this method clears the insertion point.
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

  // Calculate the innermost active normal cleanup.
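  // (Inactive cleanups are deliberately skipped here: a branch emitted
  // while a cleanup is deactivated must not be threaded through that
  // cleanup's code.)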
  EHScopeStack::stable_iterator
    TopCleanup = EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
    Builder.ClearInsertionPoint();
    return;
  }

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope as a branch fixup.
  if (!Dest.getScopeDepth().isValid()) {
    BranchFixup &Fixup = EHStack.addBranchFixup();
    Fixup.Destination = Dest.getBlock();
    Fixup.DestinationIndex = Dest.getDestIndex();
    Fixup.InitialBranch = BI;
    Fixup.OptimisticBranchBlock = nullptr;

    Builder.ClearInsertionPoint();
    return;
  }

  // Otherwise, thread through all the normal cleanups in scope.

  // Store the index at the start.
  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
  createStoreInstBefore(Index, getNormalCleanupDestSlot(), BI, *this);

  // Adjust BI to point to the first cleanup block.
  {
    EHCleanupScope &Scope =
      cast<EHCleanupScope>(*EHStack.find(TopCleanup));
    BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
  }

  // Add this destination to all the scopes involved.
  EHScopeStack::stable_iterator I = TopCleanup;
  EHScopeStack::stable_iterator E = Dest.getScopeDepth();
  if (E.strictlyEncloses(I)) {
    while (true) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
      assert(Scope.isNormalCleanup());
      I = Scope.getEnclosingNormalCleanup();

      // If this is the last cleanup we're propagating through, tell it
      // that there's a resolved jump moving through it.
      if (!E.strictlyEncloses(I)) {
        Scope.addBranchAfter(Index, Dest.getBlock());
        break;
      }

      // Otherwise, tell the scope that there's a jump propagating
      // through it.  If this isn't new information, all the rest of
      // the work has been done before.
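      // (addBranchThrough reports whether the destination was new to this
      // scope; if it was already recorded, an earlier jump to the same
      // destination has already updated every scope further out, so the
      // walk can stop.)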
      if (!Scope.addBranchThrough(Dest.getBlock()))
        break;
    }
  }

  Builder.ClearInsertionPoint();
}

static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
                              EHScopeStack::stable_iterator cleanup) {
  // If we needed an EH block for any reason, that counts.
  if (EHStack.find(cleanup)->hasEHBranches())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         i = EHStack.getInnermostEHScope(); i != cleanup; ) {
    assert(cleanup.strictlyEncloses(i));

    EHScope &scope = *EHStack.find(i);
    if (scope.hasEHBranches())
      return true;

    i = scope.getEnclosingEHScope();
  }

  return false;
}

enum ForActivation_t {
  ForActivation,
  ForDeactivation
};

/// The given cleanup block is changing activation state.  Configure a
/// cleanup variable if necessary.
///
/// It would be good if we had some way of determining if there were
/// extra uses *after* the change-over point.
static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
                                        EHScopeStack::stable_iterator C,
                                        ForActivation_t kind,
                                        llvm::Instruction *dominatingIP) {
  EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));

  // We always need the flag if we're activating the cleanup in a
  // conditional context, because we have to assume that the current
  // location doesn't necessarily dominate the cleanup's code.
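  // (E.g. activating a cleanup while emitting the second operand of '&&'
  // or '?:': the activation point is only conditionally reached, so the
  // cleanup has to test a flag at runtime instead of assuming it is
  // active.)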
  bool isActivatedInConditional =
    (kind == ForActivation && CGF.isInConditionalBranch());

  bool needFlag = false;

  // Calculate whether the cleanup was used:

  //   - as a normal cleanup
  if (Scope.isNormalCleanup()) {
    Scope.setTestFlagInNormalCleanup();
    needFlag = true;
  }

  //   - as an EH cleanup
  if (Scope.isEHCleanup() &&
      (isActivatedInConditional || IsUsedAsEHCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInEHCleanup();
    needFlag = true;
  }

  // If it hasn't yet been used as either, we're done.
  if (!needFlag)
    return;

  Address var = Scope.getActiveFlag();
  if (!var.isValid()) {
    CodeGenFunction::AllocaTrackerRAII AllocaTracker(CGF);
    var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), CharUnits::One(),
                               "cleanup.isactive");
    Scope.setActiveFlag(var);
    Scope.AddAuxAllocas(AllocaTracker.Take());

    assert(dominatingIP && "no existing variable and no dominating IP!");

    // Initialize to true or false depending on whether it was
    // active up to this point.
    llvm::Constant *value = CGF.Builder.getInt1(kind == ForDeactivation);

    // If we're in a conditional block, ignore the dominating IP and
    // use the outermost conditional branch.
    if (CGF.isInConditionalBranch()) {
      CGF.setBeforeOutermostConditional(value, var, CGF);
    } else {
      createStoreInstBefore(value, var, dominatingIP, CGF);
    }
  }

  CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
}

/// Activate a cleanup that was created in an inactivated state.
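/// This is used when a cleanup has to be pushed before the object it
/// protects is fully formed -- e.g. so that it lands at the right depth on
/// the cleanup stack -- and can only be switched on once the protected
/// object actually exists.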
void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
                                           llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "activating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(!Scope.isActive() && "double activation");

  SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);

  Scope.setActive(true);
}

/// Deactivate a cleanup that was created in an active state.
void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
                                             llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(Scope.isActive() && "double deactivation");

  // If it's the top of the stack, just pop it, but do so only if it belongs
  // to the current RunCleanupsScope.
  if (C == EHStack.stable_begin() &&
      CurrentCleanupScopeDepth.strictlyEncloses(C)) {
    PopCleanupBlock(/*FallthroughIsBranchThrough=*/false,
                    /*ForDeactivation=*/true);
    return;
  }

  // Otherwise, follow the general case.
  SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);

  Scope.setActive(false);
}

RawAddress CodeGenFunction::getNormalCleanupDestSlot() {
  if (!NormalCleanupDest.isValid())
    NormalCleanupDest =
      CreateDefaultAlignTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
  return NormalCleanupDest;
}

/// Emits all the code to cause the given temporary to be cleaned up.
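/// The temporary is registered for destruction on both the normal and the
/// exceptional path (NormalAndEHCleanup with useEHCleanup), matching the
/// C++ rule that a temporary's destructor runs however control leaves the
/// enclosing full-expression.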
void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
                                       QualType TempType,
                                       Address Ptr) {
  pushDestroy(NormalAndEHCleanup, Ptr, TempType, destroyCXXObject,
              /*useEHCleanup*/ true);
}

// Need to set "funclet" in OperandBundle properly for noThrow
// intrinsic (see CGCall.cpp).
static void EmitSehScope(CodeGenFunction &CGF,
                         llvm::FunctionCallee &SehCppScope) {
  llvm::BasicBlock *InvokeDest = CGF.getInvokeDest();
  assert(CGF.Builder.GetInsertBlock() && InvokeDest);
  llvm::BasicBlock *Cont = CGF.createBasicBlock("invoke.cont");
  SmallVector<llvm::OperandBundleDef, 1> BundleList =
      CGF.getBundlesForFunclet(SehCppScope.getCallee());
  if (CGF.CurrentFuncletPad)
    BundleList.emplace_back("funclet", CGF.CurrentFuncletPad);
  CGF.Builder.CreateInvoke(SehCppScope, Cont, InvokeDest, std::nullopt,
                           BundleList);
  CGF.EmitBlock(Cont);
}

// Invoke a llvm.seh.scope.begin at the beginning of a CPP scope for -EHa.
void CodeGenFunction::EmitSehCppScopeBegin() {
  assert(getLangOpts().EHAsynch);
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
  llvm::FunctionCallee SehCppScope =
      CGM.CreateRuntimeFunction(FTy, "llvm.seh.scope.begin");
  EmitSehScope(*this, SehCppScope);
}

// Invoke a llvm.seh.scope.end at the end of a CPP scope for -EHa.
// llvm.seh.scope.end is emitted before popCleanup, so it's "invoked".
void CodeGenFunction::EmitSehCppScopeEnd() {
  assert(getLangOpts().EHAsynch);
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
  llvm::FunctionCallee SehCppScope =
      CGM.CreateRuntimeFunction(FTy, "llvm.seh.scope.end");
  EmitSehScope(*this, SehCppScope);
}

// Invoke a llvm.seh.try.begin at the beginning of a SEH scope for -EHa.
void CodeGenFunction::EmitSehTryScopeBegin() {
  assert(getLangOpts().EHAsynch);
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
  llvm::FunctionCallee SehCppScope =
      CGM.CreateRuntimeFunction(FTy, "llvm.seh.try.begin");
  EmitSehScope(*this, SehCppScope);
}

// Invoke a llvm.seh.try.end at the end of a SEH scope for -EHa.
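// (As with scope.begin/scope.end above, the try.begin/try.end intrinsics
// emit no code of their own; they only mark the boundaries of the guarded
// region so asynchronous-exception handling can be prepared for it and
// potentially faulting instructions are not moved across the boundary.)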
void CodeGenFunction::EmitSehTryScopeEnd() {
  assert(getLangOpts().EHAsynch);
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
  llvm::FunctionCallee SehCppScope =
      CGM.CreateRuntimeFunction(FTy, "llvm.seh.try.end");
  EmitSehScope(*this, SehCppScope);
}