//===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains code dealing with the IR generation for cleanups
// and related information.
//
// A "cleanup" is a piece of code which needs to be executed whenever
// control transfers out of a particular scope. This can be
// conditionalized to occur only on exceptional control flow, only on
// normal control flow, or both.
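//
// For example, in
//   { std::string s; f(); }
// the destructor call for 's' is a cleanup that must run both when control
// falls off the end of the block and when f() throws.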
//
//===----------------------------------------------------------------------===//

#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace CodeGen;

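/// Answer whether an r-value needs to be saved across a branch. Scalar and
/// aggregate values only need saving if their underlying llvm::Value does;
/// complex values are always saved.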
bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
  if (rv.isScalar())
    return DominatingLLVMValue::needsSaving(rv.getScalarVal());
  if (rv.isAggregate())
    return DominatingLLVMValue::needsSaving(rv.getAggregatePointer());
  return true;
}

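/// Save an r-value so that it can be restored at a point dominated by the
/// current one, spilling it to a temporary alloca when necessary.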
DominatingValue<RValue>::saved_type
DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
  if (rv.isScalar()) {
    llvm::Value *V = rv.getScalarVal();

    // These automatically dominate and don't need to be saved.
    if (!DominatingLLVMValue::needsSaving(V))
      return saved_type(V, nullptr, ScalarLiteral);

    // Everything else needs an alloca.
    Address addr =
        CGF.CreateDefaultAlignTempAlloca(V->getType(), "saved-rvalue");
    CGF.Builder.CreateStore(V, addr);
    return saved_type(addr.getPointer(), nullptr, ScalarAddress);
  }

  if (rv.isComplex()) {
    CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
    llvm::Type *ComplexTy =
        llvm::StructType::get(V.first->getType(), V.second->getType());
    Address addr = CGF.CreateDefaultAlignTempAlloca(ComplexTy, "saved-complex");
    CGF.Builder.CreateStore(V.first, CGF.Builder.CreateStructGEP(addr, 0));
    CGF.Builder.CreateStore(V.second, CGF.Builder.CreateStructGEP(addr, 1));
    return saved_type(addr.getPointer(), nullptr, ComplexAddress);
  }

  assert(rv.isAggregate());
  Address V = rv.getAggregateAddress(); // TODO: volatile?
  if (!DominatingLLVMValue::needsSaving(V.getPointer()))
    return saved_type(V.getPointer(), V.getElementType(), AggregateLiteral,
                      V.getAlignment().getQuantity());

  Address addr =
      CGF.CreateTempAlloca(V.getType(), CGF.getPointerAlign(), "saved-rvalue");
  CGF.Builder.CreateStore(V.getPointer(), addr);
  return saved_type(addr.getPointer(), V.getElementType(), AggregateAddress,
                    V.getAlignment().getQuantity());
}

/// Given a saved r-value produced by SaveRValue, perform the code
/// necessary to restore it to usability at the current insertion
/// point.
RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
  auto getSavingAddress = [&](llvm::Value *value) {
    auto *AI = cast<llvm::AllocaInst>(value);
    return Address(value, AI->getAllocatedType(),
                   CharUnits::fromQuantity(AI->getAlign().value()));
  };
  switch (K) {
  case ScalarLiteral:
    return RValue::get(Value);
  case ScalarAddress:
    return RValue::get(CGF.Builder.CreateLoad(getSavingAddress(Value)));
  case AggregateLiteral:
    return RValue::getAggregate(
        Address(Value, ElementType, CharUnits::fromQuantity(Align)));
  case AggregateAddress: {
    auto addr = CGF.Builder.CreateLoad(getSavingAddress(Value));
    return RValue::getAggregate(
        Address(addr, ElementType, CharUnits::fromQuantity(Align)));
  }
  case ComplexAddress: {
    Address address = getSavingAddress(Value);
    llvm::Value *real =
        CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(address, 0));
    llvm::Value *imag =
        CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(address, 1));
    return RValue::getComplex(real, imag);
  }
  }

  llvm_unreachable("bad saved r-value kind");
}

/// Push an entry of the given size onto this protected-scope stack.
char *EHScopeStack::allocate(size_t Size) {
  Size = llvm::alignTo(Size, ScopeStackAlignment);
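  // The scope stack grows downwards in memory: entries are pushed by moving
  // StartOfData from the end of the buffer toward StartOfBuffer.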
  if (!StartOfBuffer) {
    unsigned Capacity = 1024;
    while (Capacity < Size) Capacity *= 2;
    StartOfBuffer = new char[Capacity];
    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

    unsigned NewCapacity = CurrentCapacity;
    do {
      NewCapacity *= 2;
    } while (NewCapacity < UsedCapacity + Size);

    char *NewStartOfBuffer = new char[NewCapacity];
    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
    memcpy(NewStartOfData, StartOfData, UsedCapacity);
    delete [] StartOfBuffer;
    StartOfBuffer = NewStartOfBuffer;
    EndOfBuffer = NewEndOfBuffer;
    StartOfData = NewStartOfData;
  }

  assert(StartOfBuffer + Size <= StartOfData);
  StartOfData -= Size;
  return StartOfData;
}

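/// Pop the most recently pushed entry of the given size off the stack.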
void EHScopeStack::deallocate(size_t Size) {
  StartOfData += llvm::alignTo(Size, ScopeStackAlignment);
}

bool EHScopeStack::containsOnlyLifetimeMarkers(
    EHScopeStack::stable_iterator Old) const {
  for (EHScopeStack::iterator it = begin(); stabilize(it) != Old; it++) {
    EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*it);
    if (!cleanup || !cleanup->isLifetimeMarker())
      return false;
  }

  return true;
}

bool EHScopeStack::requiresLandingPad() const {
  for (stable_iterator si = getInnermostEHScope(); si != stable_end(); ) {
    // Skip lifetime markers.
    if (auto *cleanup = dyn_cast<EHCleanupScope>(&*find(si)))
      if (cleanup->isLifetimeMarker()) {
        si = cleanup->getEnclosingEHScope();
        continue;
      }
    return true;
  }

  return false;
}

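/// Walk the chain of enclosing normal cleanups, returning the innermost one
/// that is still active, or stable_end() if there is none.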
EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
         si != se; ) {
    EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
    if (cleanup.isActive()) return si;
    si = cleanup.getEnclosingNormalCleanup();
  }
  return stable_end();
}


void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
  bool IsNormalCleanup = Kind & NormalCleanup;
  bool IsEHCleanup = Kind & EHCleanup;
  bool IsLifetimeMarker = Kind & LifetimeMarker;

  // Per C++ [except.terminate], it is implementation-defined whether none,
  // some, or all cleanups are called before std::terminate. Thus, when
  // terminate is the current EH scope, we may skip adding any EH cleanup
  // scopes.
  if (InnermostEHScope != stable_end() &&
      find(InnermostEHScope)->getKind() == EHScope::Terminate)
    IsEHCleanup = false;

  EHCleanupScope *Scope =
    new (Buffer) EHCleanupScope(IsNormalCleanup,
                                IsEHCleanup,
                                Size,
                                BranchFixups.size(),
                                InnermostNormalCleanup,
                                InnermostEHScope);
  if (IsNormalCleanup)
    InnermostNormalCleanup = stable_begin();
  if (IsEHCleanup)
    InnermostEHScope = stable_begin();
  if (IsLifetimeMarker)
    Scope->setLifetimeMarker();

  // With Windows -EHa, invoke llvm.seh.scope.begin() for EHCleanup
  if (CGF->getLangOpts().EHAsynch && IsEHCleanup && !IsLifetimeMarker &&
      CGF->getTarget().getCXXABI().isMicrosoft())
    CGF->EmitSehCppScopeBegin();

  return Scope->getCleanupBuffer();
}

void EHScopeStack::popCleanup() {
  assert(!empty() && "popping exception stack when not empty");

  assert(isa<EHCleanupScope>(*begin()));
  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
  InnermostEHScope = Cleanup.getEnclosingEHScope();
  deallocate(Cleanup.getAllocatedSize());

  // Destroy the cleanup.
  Cleanup.Destroy();

  // Check whether we can shrink the branch-fixups stack.
  if (!BranchFixups.empty()) {
    // If we no longer have any normal cleanups, all the fixups are
    // complete.
    if (!hasNormalCleanups())
      BranchFixups.clear();

    // Otherwise we can still trim out unnecessary nulls.
    else
      popNullFixups();
  }
}

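// Filter, catch, and terminate scopes are plain EH scopes: pushing one makes
// it the innermost EH scope, and popping it restores the enclosing scope
// recorded when it was pushed.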
EHFilterScope *EHScopeStack::pushFilter(unsigned numFilters) {
  assert(getInnermostEHScope() == stable_end());
  char *buffer = allocate(EHFilterScope::getSizeForNumFilters(numFilters));
  EHFilterScope *filter = new (buffer) EHFilterScope(numFilters);
  InnermostEHScope = stable_begin();
  return filter;
}

void EHScopeStack::popFilter() {
  assert(!empty() && "popping exception stack when not empty");

  EHFilterScope &filter = cast<EHFilterScope>(*begin());
  deallocate(EHFilterScope::getSizeForNumFilters(filter.getNumFilters()));

  InnermostEHScope = filter.getEnclosingEHScope();
}

EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) {
  char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(numHandlers));
  EHCatchScope *scope =
    new (buffer) EHCatchScope(numHandlers, InnermostEHScope);
  InnermostEHScope = stable_begin();
  return scope;
}

void EHScopeStack::pushTerminate() {
  char *Buffer = allocate(EHTerminateScope::getSize());
  new (Buffer) EHTerminateScope(InnermostEHScope);
  InnermostEHScope = stable_begin();
}

/// Remove any 'null' fixups on the stack. However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place. We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup; otherwise there really shouldn't be any fixups.
  assert(hasNormalCleanups());

  EHScopeStack::iterator it = find(InnermostNormalCleanup);
  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

  while (BranchFixups.size() > MinSize &&
         BranchFixups.back().Destination == nullptr)
    BranchFixups.pop_back();
}

Address CodeGenFunction::createCleanupActiveFlag() {
  // Create a variable to decide whether the cleanup needs to be run.
  Address active = CreateTempAllocaWithoutCast(
      Builder.getInt1Ty(), CharUnits::One(), "cleanup.cond");

  // Initialize it to false at a site that's guaranteed to be run
  // before each evaluation.
  setBeforeOutermostConditional(Builder.getFalse(), active);

  // Initialize it to true at the current location.
  Builder.CreateStore(Builder.getTrue(), active);

  return active;
}

void CodeGenFunction::initFullExprCleanupWithFlag(Address ActiveFlag) {
  // Set that as the active flag in the cleanup.
  EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
  assert(!cleanup.hasActiveFlag() && "cleanup already has active flag?");
  cleanup.setActiveFlag(ActiveFlag);

  if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
  if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
}

void EHScopeStack::Cleanup::anchor() {}

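// Helpers for emitting a store or load at an arbitrary insertion point while
// preserving the alignment recorded in the Address.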
static void createStoreInstBefore(llvm::Value *value, Address addr,
                                  llvm::Instruction *beforeInst) {
  auto store = new llvm::StoreInst(value, addr.getPointer(), beforeInst);
  store->setAlignment(addr.getAlignment().getAsAlign());
}

static llvm::LoadInst *createLoadInstBefore(Address addr, const Twine &name,
                                            llvm::Instruction *beforeInst) {
  return new llvm::LoadInst(addr.getElementType(), addr.getPointer(), name,
                            false, addr.getAlignment().getAsAlign(),
                            beforeInst);
}

/// All the branch fixups on the EH stack have propagated out past the
/// outermost normal cleanup; resolve them all by adding cases to the
/// given switch instruction.
static void ResolveAllBranchFixups(CodeGenFunction &CGF,
                                   llvm::SwitchInst *Switch,
                                   llvm::BasicBlock *CleanupEntry) {
  llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;

  for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination isn't set.
    BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
    if (Fixup.Destination == nullptr) continue;

    // If there isn't an OptimisticBranchBlock, then InitialBranch is
    // still pointing directly to its destination; forward it to the
    // appropriate cleanup entry. This is required in the specific
    // case of
    //   { std::string s; goto lbl; }
    //   lbl:
    // i.e. where there's an unresolved fixup inside a single cleanup
    // entry which we're currently popping.
    if (Fixup.OptimisticBranchBlock == nullptr) {
      createStoreInstBefore(CGF.Builder.getInt32(Fixup.DestinationIndex),
                            CGF.getNormalCleanupDestSlot(),
                            Fixup.InitialBranch);
      Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
    }

    // Don't add this case to the switch statement twice.
    if (!CasesAdded.insert(Fixup.Destination).second)
      continue;

    Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
                    Fixup.Destination);
  }

  CGF.EHStack.clearFixups();
}

/// Transitions the terminator of the given exit-block of a cleanup to
/// be a cleanup switch.
static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
                                                   llvm::BasicBlock *Block) {
  // If it's a branch, turn it into a switch whose default
  // destination is its original target.
  llvm::Instruction *Term = Block->getTerminator();
  assert(Term && "can't transition block without terminator");

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional());
    auto Load = createLoadInstBefore(CGF.getNormalCleanupDestSlot(),
                                     "cleanup.dest", Term);
    llvm::SwitchInst *Switch =
      llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
    Br->eraseFromParent();
    return Switch;
  } else {
    return cast<llvm::SwitchInst>(Term);
  }
}

void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
  assert(Block && "resolving a null target block");
  if (!EHStack.getNumBranchFixups()) return;

  assert(EHStack.hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
  bool ResolvedAny = false;

  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination doesn't match.
    BranchFixup &Fixup = EHStack.getBranchFixup(I);
    if (Fixup.Destination != Block) continue;

    Fixup.Destination = nullptr;
    ResolvedAny = true;

    // If it doesn't have an optimistic branch block, LatestBranch is
    // already pointing to the right place.
    llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
    if (!BranchBB)
      continue;

    // Don't process the same optimistic branch block twice.
    if (!ModifiedOptimisticBlocks.insert(BranchBB).second)
      continue;

    llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);

    // Add a case to the switch.
    Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
  }

  if (ResolvedAny)
    EHStack.popNullFixups();
}

/// Pops cleanup blocks until the given savepoint is reached.
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  assert(Old.isValid());

  bool HadBranches = false;
  while (EHStack.stable_begin() != Old) {
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
    HadBranches |= Scope.hasBranches();

    // As long as Old strictly encloses the scope's enclosing normal
    // cleanup, we're going to emit another normal cleanup which
    // fallthrough can propagate through.
    bool FallThroughIsBranchThrough =
      Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());

    PopCleanupBlock(FallThroughIsBranchThrough);
  }

  // If we didn't have any branches, the insertion point before cleanups must
  // dominate the current insertion point and we don't need to reload any
  // values.
  if (!HadBranches)
    return;

  // Spill and reload all values that the caller wants to be live at the
  // current insertion point.
  for (llvm::Value **ReloadedValue : ValuesToReload) {
    auto *Inst = dyn_cast_or_null<llvm::Instruction>(*ReloadedValue);
    if (!Inst)
      continue;

    // Don't spill static allocas, they dominate all cleanups. These are
    // created by binding a reference to a local variable or temporary.
    auto *AI = dyn_cast<llvm::AllocaInst>(Inst);
    if (AI && AI->isStaticAlloca())
      continue;

    Address Tmp =
        CreateDefaultAlignTempAlloca(Inst->getType(), "tmp.exprcleanup");

    // Find an insertion point after Inst and spill it to the temporary.
    llvm::BasicBlock::iterator InsertBefore;
    if (auto *Invoke = dyn_cast<llvm::InvokeInst>(Inst))
      InsertBefore = Invoke->getNormalDest()->getFirstInsertionPt();
    else
      InsertBefore = std::next(Inst->getIterator());
    CGBuilderTy(CGM, &*InsertBefore).CreateStore(Inst, Tmp);

    // Reload the value at the current insertion point.
    *ReloadedValue = Builder.CreateLoad(Tmp);
  }
}

/// Pops cleanup blocks until the given savepoint is reached, then add the
/// cleanups from the given savepoint in the lifetime-extended cleanups stack.
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old, size_t OldLifetimeExtendedSize,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  PopCleanupBlocks(Old, ValuesToReload);

  // Move our deferred cleanups onto the EH stack.
  for (size_t I = OldLifetimeExtendedSize,
              E = LifetimeExtendedCleanupStack.size(); I != E; /**/) {
    // Alignment should be guaranteed by the vptrs in the individual cleanups.
    assert((I % alignof(LifetimeExtendedCleanupHeader) == 0) &&
           "misaligned cleanup stack entry");

    LifetimeExtendedCleanupHeader &Header =
        reinterpret_cast<LifetimeExtendedCleanupHeader&>(
            LifetimeExtendedCleanupStack[I]);
    I += sizeof(Header);

    EHStack.pushCopyOfCleanup(Header.getKind(),
                              &LifetimeExtendedCleanupStack[I],
                              Header.getSize());
    I += Header.getSize();

    if (Header.isConditional()) {
      Address ActiveFlag =
          reinterpret_cast<Address &>(LifetimeExtendedCleanupStack[I]);
      initFullExprCleanupWithFlag(ActiveFlag);
      I += sizeof(ActiveFlag);
    }
  }
  LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
}

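/// Get the normal-cleanup entry block for the given scope, creating it
/// lazily on first use.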
static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
                                           EHCleanupScope &Scope) {
  assert(Scope.isNormalCleanup());
  llvm::BasicBlock *Entry = Scope.getNormalBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("cleanup");
    Scope.setNormalBlock(Entry);
  }
  return Entry;
}

/// Attempts to reduce a cleanup's entry block to a fallthrough. This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup blocks.
///
/// Returns the new block, whatever it is.
static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
                                              llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return Entry;

  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return Entry;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
  // block, we'll need to continue inserting at the end of the
  // predecessor.
  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

  // Kill the branch.
  Br->eraseFromParent();

  // Replace all uses of the entry with the predecessor, in case there
  // are phis in the cleanup.
  Entry->replaceAllUsesWith(Pred);

  // Merge the blocks.
  Pred->splice(Pred->end(), Entry);

  // Kill the entry block.
  Entry->eraseFromParent();

  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);

  return Pred;
}

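/// Emit a cleanup's code, guarded by a conditional branch on its active
/// flag if one is present.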
static void EmitCleanup(CodeGenFunction &CGF,
                        EHScopeStack::Cleanup *Fn,
                        EHScopeStack::Cleanup::Flags flags,
                        Address ActiveFlag) {
  // If there's an active flag, load it and skip the cleanup if it's
  // false.
  llvm::BasicBlock *ContBB = nullptr;
  if (ActiveFlag.isValid()) {
    ContBB = CGF.createBasicBlock("cleanup.done");
    llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
    llvm::Value *IsActive
      = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
    CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
    CGF.EmitBlock(CleanupBB);
  }

  // Ask the cleanup to emit itself.
  Fn->Emit(CGF, flags);
  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");

  // Emit the continuation block if there was an active flag.
  if (ActiveFlag.isValid())
    CGF.EmitBlock(ContBB);
}

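/// Redirect the terminator of a cleanup's exit block so that edges that
/// previously targeted From now target To.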
ForwardPrebranchedFallthrough(llvm::BasicBlock * Exit,llvm::BasicBlock * From,llvm::BasicBlock * To)595e5dd7070Spatrick static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
596e5dd7070Spatrick llvm::BasicBlock *From,
597e5dd7070Spatrick llvm::BasicBlock *To) {
598e5dd7070Spatrick // Exit is the exit block of a cleanup, so it always terminates in
599e5dd7070Spatrick // an unconditional branch or a switch.
600e5dd7070Spatrick llvm::Instruction *Term = Exit->getTerminator();
601e5dd7070Spatrick
602e5dd7070Spatrick if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
603e5dd7070Spatrick assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
604e5dd7070Spatrick Br->setSuccessor(0, To);
605e5dd7070Spatrick } else {
606e5dd7070Spatrick llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
607e5dd7070Spatrick for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
608e5dd7070Spatrick if (Switch->getSuccessor(I) == From)
609e5dd7070Spatrick Switch->setSuccessor(I, To);
610e5dd7070Spatrick }
611e5dd7070Spatrick }
612e5dd7070Spatrick
613e5dd7070Spatrick /// We don't need a normal entry block for the given cleanup.
614e5dd7070Spatrick /// Optimistic fixup branches can cause these blocks to come into
615e5dd7070Spatrick /// existence anyway; if so, destroy it.
616e5dd7070Spatrick ///
617e5dd7070Spatrick /// The validity of this transformation is very much specific to the
618e5dd7070Spatrick /// exact ways in which we form branches to cleanup entries.
destroyOptimisticNormalEntry(CodeGenFunction & CGF,EHCleanupScope & scope)619e5dd7070Spatrick static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
620e5dd7070Spatrick EHCleanupScope &scope) {
621e5dd7070Spatrick llvm::BasicBlock *entry = scope.getNormalBlock();
622e5dd7070Spatrick if (!entry) return;
623e5dd7070Spatrick
624e5dd7070Spatrick // Replace all the uses with unreachable.
625e5dd7070Spatrick llvm::BasicBlock *unreachableBB = CGF.getUnreachableBlock();
626e5dd7070Spatrick for (llvm::BasicBlock::use_iterator
627e5dd7070Spatrick i = entry->use_begin(), e = entry->use_end(); i != e; ) {
628e5dd7070Spatrick llvm::Use &use = *i;
629e5dd7070Spatrick ++i;
630e5dd7070Spatrick
631e5dd7070Spatrick use.set(unreachableBB);
632e5dd7070Spatrick
633e5dd7070Spatrick // The only uses should be fixup switches.
634e5dd7070Spatrick llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
635e5dd7070Spatrick if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
636e5dd7070Spatrick // Replace the switch with a branch.
637e5dd7070Spatrick llvm::BranchInst::Create(si->case_begin()->getCaseSuccessor(), si);
638e5dd7070Spatrick
639e5dd7070Spatrick // The switch operand is a load from the cleanup-dest alloca.
640e5dd7070Spatrick llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());
641e5dd7070Spatrick
642e5dd7070Spatrick // Destroy the switch.
643e5dd7070Spatrick si->eraseFromParent();
644e5dd7070Spatrick
645e5dd7070Spatrick // Destroy the load.
646e5dd7070Spatrick assert(condition->getOperand(0) == CGF.NormalCleanupDest.getPointer());
647e5dd7070Spatrick assert(condition->use_empty());
648e5dd7070Spatrick condition->eraseFromParent();
649e5dd7070Spatrick }
650e5dd7070Spatrick }
651e5dd7070Spatrick
652e5dd7070Spatrick assert(entry->use_empty());
653e5dd7070Spatrick delete entry;
654e5dd7070Spatrick }
655e5dd7070Spatrick
656e5dd7070Spatrick /// Pops a cleanup block. If the block includes a normal cleanup, the
657e5dd7070Spatrick /// current insertion point is threaded through the cleanup, as are
658e5dd7070Spatrick /// any branch fixups on the cleanup.
PopCleanupBlock(bool FallthroughIsBranchThrough)659e5dd7070Spatrick void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
660e5dd7070Spatrick assert(!EHStack.empty() && "cleanup stack is empty!");
661e5dd7070Spatrick assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
662e5dd7070Spatrick EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
663e5dd7070Spatrick assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
664e5dd7070Spatrick
665e5dd7070Spatrick // Remember activation information.
666e5dd7070Spatrick bool IsActive = Scope.isActive();
667e5dd7070Spatrick Address NormalActiveFlag =
668e5dd7070Spatrick Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag()
669e5dd7070Spatrick : Address::invalid();
670e5dd7070Spatrick Address EHActiveFlag =
671e5dd7070Spatrick Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag()
672e5dd7070Spatrick : Address::invalid();
673e5dd7070Spatrick
674e5dd7070Spatrick // Check whether we need an EH cleanup. This is only true if we've
675e5dd7070Spatrick // generated a lazy EH cleanup block.
676e5dd7070Spatrick llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
677e5dd7070Spatrick assert(Scope.hasEHBranches() == (EHEntry != nullptr));
678e5dd7070Spatrick bool RequiresEHCleanup = (EHEntry != nullptr);
679e5dd7070Spatrick EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope();
680e5dd7070Spatrick
681e5dd7070Spatrick // Check the three conditions which might require a normal cleanup:
682e5dd7070Spatrick
683e5dd7070Spatrick // - whether there are branch fix-ups through this cleanup
684e5dd7070Spatrick unsigned FixupDepth = Scope.getFixupDepth();
685e5dd7070Spatrick bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;
686e5dd7070Spatrick
687e5dd7070Spatrick // - whether there are branch-throughs or branch-afters
688e5dd7070Spatrick bool HasExistingBranches = Scope.hasBranches();
689e5dd7070Spatrick
690e5dd7070Spatrick // - whether there's a fallthrough
691e5dd7070Spatrick llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
692e5dd7070Spatrick bool HasFallthrough = (FallthroughSource != nullptr && IsActive);
693e5dd7070Spatrick
694e5dd7070Spatrick // Branch-through fall-throughs leave the insertion point set to the
695e5dd7070Spatrick // end of the last cleanup, which points to the current scope. The
696e5dd7070Spatrick // rest of IR gen doesn't need to worry about this; it only happens
697e5dd7070Spatrick // during the execution of PopCleanupBlocks().
698e5dd7070Spatrick bool HasPrebranchedFallthrough =
699e5dd7070Spatrick (FallthroughSource && FallthroughSource->getTerminator());
700e5dd7070Spatrick
701e5dd7070Spatrick // If this is a normal cleanup, then having a prebranched
702e5dd7070Spatrick // fallthrough implies that the fallthrough source unconditionally
703e5dd7070Spatrick // jumps here.
704e5dd7070Spatrick assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
705e5dd7070Spatrick (Scope.getNormalBlock() &&
706e5dd7070Spatrick FallthroughSource->getTerminator()->getSuccessor(0)
707e5dd7070Spatrick == Scope.getNormalBlock()));
708e5dd7070Spatrick
709e5dd7070Spatrick bool RequiresNormalCleanup = false;
710e5dd7070Spatrick if (Scope.isNormalCleanup() &&
711e5dd7070Spatrick (HasFixups || HasExistingBranches || HasFallthrough)) {
712e5dd7070Spatrick RequiresNormalCleanup = true;
713e5dd7070Spatrick }
714e5dd7070Spatrick
715e5dd7070Spatrick // If we have a prebranched fallthrough into an inactive normal
716e5dd7070Spatrick // cleanup, rewrite it so that it leads to the appropriate place.
717e5dd7070Spatrick if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) {
718e5dd7070Spatrick llvm::BasicBlock *prebranchDest;
719e5dd7070Spatrick
720e5dd7070Spatrick // If the prebranch is semantically branching through the next
721e5dd7070Spatrick // cleanup, just forward it to the next block, leaving the
722e5dd7070Spatrick // insertion point in the prebranched block.
723e5dd7070Spatrick if (FallthroughIsBranchThrough) {
724e5dd7070Spatrick EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
725e5dd7070Spatrick prebranchDest = CreateNormalEntry(*this, cast<EHCleanupScope>(enclosing));
726e5dd7070Spatrick
727e5dd7070Spatrick // Otherwise, we need to make a new block. If the normal cleanup
728e5dd7070Spatrick // isn't being used at all, we could actually reuse the normal
729e5dd7070Spatrick // entry block, but this is simpler, and it avoids conflicts with
730e5dd7070Spatrick // dead optimistic fixup branches.
731e5dd7070Spatrick } else {
732e5dd7070Spatrick prebranchDest = createBasicBlock("forwarded-prebranch");
733e5dd7070Spatrick EmitBlock(prebranchDest);
734e5dd7070Spatrick }
735e5dd7070Spatrick
736e5dd7070Spatrick llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
737e5dd7070Spatrick assert(normalEntry && !normalEntry->use_empty());
738e5dd7070Spatrick
739e5dd7070Spatrick ForwardPrebranchedFallthrough(FallthroughSource,
740e5dd7070Spatrick normalEntry, prebranchDest);
741e5dd7070Spatrick }
742e5dd7070Spatrick
743e5dd7070Spatrick // If we don't need the cleanup at all, we're done.
744e5dd7070Spatrick if (!RequiresNormalCleanup && !RequiresEHCleanup) {
745e5dd7070Spatrick destroyOptimisticNormalEntry(*this, Scope);
746e5dd7070Spatrick EHStack.popCleanup(); // safe because there are no fixups
747e5dd7070Spatrick assert(EHStack.getNumBranchFixups() == 0 ||
748e5dd7070Spatrick EHStack.hasNormalCleanups());
749e5dd7070Spatrick return;
750e5dd7070Spatrick }
751e5dd7070Spatrick
752e5dd7070Spatrick // Copy the cleanup emission data out. This uses either a stack
753e5dd7070Spatrick // array or malloc'd memory, depending on the size, which is
754e5dd7070Spatrick // behavior that SmallVector would provide, if we could use it
755e5dd7070Spatrick // here. Unfortunately, if you ask for a SmallVector<char>, the
756e5dd7070Spatrick // alignment isn't sufficient.
757e5dd7070Spatrick auto *CleanupSource = reinterpret_cast<char *>(Scope.getCleanupBuffer());
758e5dd7070Spatrick alignas(EHScopeStack::ScopeStackAlignment) char
759e5dd7070Spatrick CleanupBufferStack[8 * sizeof(void *)];
760e5dd7070Spatrick std::unique_ptr<char[]> CleanupBufferHeap;
761e5dd7070Spatrick size_t CleanupSize = Scope.getCleanupSize();
762e5dd7070Spatrick EHScopeStack::Cleanup *Fn;
763e5dd7070Spatrick
764e5dd7070Spatrick if (CleanupSize <= sizeof(CleanupBufferStack)) {
765e5dd7070Spatrick memcpy(CleanupBufferStack, CleanupSource, CleanupSize);
766e5dd7070Spatrick Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferStack);
767e5dd7070Spatrick } else {
768e5dd7070Spatrick CleanupBufferHeap.reset(new char[CleanupSize]);
769e5dd7070Spatrick memcpy(CleanupBufferHeap.get(), CleanupSource, CleanupSize);
770e5dd7070Spatrick Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferHeap.get());
771e5dd7070Spatrick }
772e5dd7070Spatrick
773e5dd7070Spatrick EHScopeStack::Cleanup::Flags cleanupFlags;
774e5dd7070Spatrick if (Scope.isNormalCleanup())
775e5dd7070Spatrick cleanupFlags.setIsNormalCleanupKind();
776e5dd7070Spatrick if (Scope.isEHCleanup())
777e5dd7070Spatrick cleanupFlags.setIsEHCleanupKind();
778e5dd7070Spatrick
779a9ac8606Spatrick // Under -EHa, invoke seh.scope.end() to mark scope end before dtor
780a9ac8606Spatrick bool IsEHa = getLangOpts().EHAsynch && !Scope.isLifetimeMarker();
781a9ac8606Spatrick const EHPersonality &Personality = EHPersonality::get(*this);
782e5dd7070Spatrick if (!RequiresNormalCleanup) {
783a9ac8606Spatrick // Mark CPP scope end for passed-by-value Arg temp
784a9ac8606Spatrick // per Windows ABI which is "normally" Cleanup in callee
785a9ac8606Spatrick if (IsEHa && getInvokeDest()) {
786a9ac8606Spatrick if (Personality.isMSVCXXPersonality())
787a9ac8606Spatrick EmitSehCppScopeEnd();
788a9ac8606Spatrick }
789e5dd7070Spatrick destroyOptimisticNormalEntry(*this, Scope);
790e5dd7070Spatrick EHStack.popCleanup();
791e5dd7070Spatrick } else {
792e5dd7070Spatrick // If we have a fallthrough and no other need for the cleanup,
793e5dd7070Spatrick // emit it directly.
794a9ac8606Spatrick if (HasFallthrough && !HasPrebranchedFallthrough && !HasFixups &&
795a9ac8606Spatrick !HasExistingBranches) {
796a9ac8606Spatrick
797a9ac8606Spatrick // mark SEH scope end for fall-through flow
798a9ac8606Spatrick if (IsEHa && getInvokeDest()) {
799a9ac8606Spatrick if (Personality.isMSVCXXPersonality())
800a9ac8606Spatrick EmitSehCppScopeEnd();
801a9ac8606Spatrick else
802a9ac8606Spatrick EmitSehTryScopeEnd();
803a9ac8606Spatrick }
804e5dd7070Spatrick
805e5dd7070Spatrick destroyOptimisticNormalEntry(*this, Scope);
806e5dd7070Spatrick EHStack.popCleanup();
807e5dd7070Spatrick
808e5dd7070Spatrick EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);
809e5dd7070Spatrick
810e5dd7070Spatrick // Otherwise, the best approach is to thread everything through
811e5dd7070Spatrick // the cleanup block and then try to clean up after ourselves.
812e5dd7070Spatrick } else {
813e5dd7070Spatrick // Force the entry block to exist.
814e5dd7070Spatrick llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);
815e5dd7070Spatrick
816e5dd7070Spatrick // I. Set up the fallthrough edge in.
817e5dd7070Spatrick
818e5dd7070Spatrick CGBuilderTy::InsertPoint savedInactiveFallthroughIP;
819e5dd7070Spatrick
820e5dd7070Spatrick // If there's a fallthrough, we need to store the cleanup
821e5dd7070Spatrick // destination index. For fall-throughs this is always zero.
822e5dd7070Spatrick if (HasFallthrough) {
823e5dd7070Spatrick if (!HasPrebranchedFallthrough)
824e5dd7070Spatrick Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());
825e5dd7070Spatrick
826e5dd7070Spatrick // Otherwise, save and clear the IP if we don't have fallthrough
827e5dd7070Spatrick // because the cleanup is inactive.
828e5dd7070Spatrick } else if (FallthroughSource) {
829e5dd7070Spatrick assert(!IsActive && "source without fallthrough for active cleanup");
830e5dd7070Spatrick savedInactiveFallthroughIP = Builder.saveAndClearIP();
831e5dd7070Spatrick }
832e5dd7070Spatrick
833e5dd7070Spatrick // II. Emit the entry block. This implicitly branches to it if
834e5dd7070Spatrick // we have fallthrough. All the fixups and existing branches
835e5dd7070Spatrick // should already be branched to it.
836e5dd7070Spatrick EmitBlock(NormalEntry);
837e5dd7070Spatrick
838a9ac8606Spatrick // intercept normal cleanup to mark SEH scope end
839a9ac8606Spatrick if (IsEHa) {
840a9ac8606Spatrick if (Personality.isMSVCXXPersonality())
841a9ac8606Spatrick EmitSehCppScopeEnd();
842a9ac8606Spatrick else
843a9ac8606Spatrick EmitSehTryScopeEnd();
844a9ac8606Spatrick }
845a9ac8606Spatrick
846e5dd7070Spatrick // III. Figure out where we're going and build the cleanup
847e5dd7070Spatrick // epilogue.
848e5dd7070Spatrick
849e5dd7070Spatrick bool HasEnclosingCleanups =
850e5dd7070Spatrick (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());
851e5dd7070Spatrick
852e5dd7070Spatrick // Compute the branch-through dest if we need it:
853e5dd7070Spatrick // - if there are branch-throughs threaded through the scope
854e5dd7070Spatrick // - if fall-through is a branch-through
855e5dd7070Spatrick // - if there are fixups that will be optimistically forwarded
856e5dd7070Spatrick // to the enclosing cleanup
857e5dd7070Spatrick llvm::BasicBlock *BranchThroughDest = nullptr;
858e5dd7070Spatrick if (Scope.hasBranchThroughs() ||
859e5dd7070Spatrick (FallthroughSource && FallthroughIsBranchThrough) ||
860e5dd7070Spatrick (HasFixups && HasEnclosingCleanups)) {
861e5dd7070Spatrick assert(HasEnclosingCleanups);
862e5dd7070Spatrick EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
863e5dd7070Spatrick BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
864e5dd7070Spatrick }
865e5dd7070Spatrick
866e5dd7070Spatrick llvm::BasicBlock *FallthroughDest = nullptr;
867e5dd7070Spatrick SmallVector<llvm::Instruction*, 2> InstsToAppend;
868e5dd7070Spatrick
869e5dd7070Spatrick // If there's exactly one branch-after and no other threads,
870e5dd7070Spatrick // we can route it without a switch.
871e5dd7070Spatrick if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
872e5dd7070Spatrick Scope.getNumBranchAfters() == 1) {
873e5dd7070Spatrick assert(!BranchThroughDest || !IsActive);
874e5dd7070Spatrick
875e5dd7070Spatrick // Clean up the possibly dead store to the cleanup dest slot.
876e5dd7070Spatrick llvm::Instruction *NormalCleanupDestSlot =
877e5dd7070Spatrick cast<llvm::Instruction>(getNormalCleanupDestSlot().getPointer());
878e5dd7070Spatrick if (NormalCleanupDestSlot->hasOneUse()) {
879e5dd7070Spatrick NormalCleanupDestSlot->user_back()->eraseFromParent();
880e5dd7070Spatrick NormalCleanupDestSlot->eraseFromParent();
881e5dd7070Spatrick NormalCleanupDest = Address::invalid();
882e5dd7070Spatrick }
883e5dd7070Spatrick
884e5dd7070Spatrick llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
885e5dd7070Spatrick InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));
886e5dd7070Spatrick
887e5dd7070Spatrick // Build a switch-out if we need it:
888e5dd7070Spatrick // - if there are branch-afters threaded through the scope
889e5dd7070Spatrick // - if fall-through is a branch-after
890e5dd7070Spatrick // - if there are fixups that have nowhere left to go and
891e5dd7070Spatrick // so must be immediately resolved
892e5dd7070Spatrick } else if (Scope.getNumBranchAfters() ||
893e5dd7070Spatrick (HasFallthrough && !FallthroughIsBranchThrough) ||
894e5dd7070Spatrick (HasFixups && !HasEnclosingCleanups)) {
895e5dd7070Spatrick
896e5dd7070Spatrick llvm::BasicBlock *Default =
897e5dd7070Spatrick (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());
898e5dd7070Spatrick
899e5dd7070Spatrick // TODO: base this on the number of branch-afters and fixups
900e5dd7070Spatrick const unsigned SwitchCapacity = 10;
901e5dd7070Spatrick
902ec727ea7Spatrick // pass the abnormal exit flag to Fn (SEH cleanup)
903ec727ea7Spatrick cleanupFlags.setHasExitSwitch();
904ec727ea7Spatrick
905e5dd7070Spatrick llvm::LoadInst *Load =
906e5dd7070Spatrick createLoadInstBefore(getNormalCleanupDestSlot(), "cleanup.dest",
907e5dd7070Spatrick nullptr);
908e5dd7070Spatrick llvm::SwitchInst *Switch =
909e5dd7070Spatrick llvm::SwitchInst::Create(Load, Default, SwitchCapacity);
910e5dd7070Spatrick
911e5dd7070Spatrick InstsToAppend.push_back(Load);
912e5dd7070Spatrick InstsToAppend.push_back(Switch);
913e5dd7070Spatrick
914e5dd7070Spatrick // Branch-after fallthrough.
915e5dd7070Spatrick if (FallthroughSource && !FallthroughIsBranchThrough) {
916e5dd7070Spatrick FallthroughDest = createBasicBlock("cleanup.cont");
917e5dd7070Spatrick if (HasFallthrough)
918e5dd7070Spatrick Switch->addCase(Builder.getInt32(0), FallthroughDest);
919e5dd7070Spatrick }
920e5dd7070Spatrick
921e5dd7070Spatrick for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
922e5dd7070Spatrick Switch->addCase(Scope.getBranchAfterIndex(I),
923e5dd7070Spatrick Scope.getBranchAfterBlock(I));
924e5dd7070Spatrick }
925e5dd7070Spatrick
926e5dd7070Spatrick // If there aren't any enclosing cleanups, we can resolve all
927e5dd7070Spatrick // the fixups now.
928e5dd7070Spatrick if (HasFixups && !HasEnclosingCleanups)
929e5dd7070Spatrick ResolveAllBranchFixups(*this, Switch, NormalEntry);
930e5dd7070Spatrick } else {
931e5dd7070Spatrick // We should always have a branch-through destination in this case.
932e5dd7070Spatrick assert(BranchThroughDest);
933e5dd7070Spatrick InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
934e5dd7070Spatrick }
935e5dd7070Spatrick
936e5dd7070Spatrick // IV. Pop the cleanup and emit it.
937e5dd7070Spatrick EHStack.popCleanup();
938e5dd7070Spatrick assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);
939e5dd7070Spatrick
940e5dd7070Spatrick EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);
941e5dd7070Spatrick
942e5dd7070Spatrick // Append the prepared cleanup prologue from above.
943e5dd7070Spatrick llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
944e5dd7070Spatrick for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
945*12c85518Srobert InstsToAppend[I]->insertInto(NormalExit, NormalExit->end());
946e5dd7070Spatrick
947e5dd7070Spatrick // Optimistically hope that any fixups will continue falling through.
948e5dd7070Spatrick for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
949e5dd7070Spatrick I < E; ++I) {
950e5dd7070Spatrick BranchFixup &Fixup = EHStack.getBranchFixup(I);
951e5dd7070Spatrick if (!Fixup.Destination) continue;
952e5dd7070Spatrick if (!Fixup.OptimisticBranchBlock) {
953e5dd7070Spatrick createStoreInstBefore(Builder.getInt32(Fixup.DestinationIndex),
954e5dd7070Spatrick getNormalCleanupDestSlot(),
955e5dd7070Spatrick Fixup.InitialBranch);
956e5dd7070Spatrick Fixup.InitialBranch->setSuccessor(0, NormalEntry);
957e5dd7070Spatrick }
958e5dd7070Spatrick Fixup.OptimisticBranchBlock = NormalExit;
959e5dd7070Spatrick }
960e5dd7070Spatrick
961e5dd7070Spatrick // V. Set up the fallthrough edge out.
962e5dd7070Spatrick
963e5dd7070Spatrick // Case 1: a fallthrough source exists but doesn't branch to the
964e5dd7070Spatrick // cleanup because the cleanup is inactive.
965e5dd7070Spatrick if (!HasFallthrough && FallthroughSource) {
966e5dd7070Spatrick // Prebranched fallthrough was forwarded earlier.
967e5dd7070Spatrick // Non-prebranched fallthrough doesn't need to be forwarded.
968e5dd7070Spatrick // Either way, all we need to do is restore the IP we cleared before.
969e5dd7070Spatrick assert(!IsActive);
970e5dd7070Spatrick Builder.restoreIP(savedInactiveFallthroughIP);
971e5dd7070Spatrick
972e5dd7070Spatrick // Case 2: a fallthrough source exists and should branch to the
973e5dd7070Spatrick // cleanup, but we're not supposed to branch through to the next
974e5dd7070Spatrick // cleanup.
975e5dd7070Spatrick } else if (HasFallthrough && FallthroughDest) {
976e5dd7070Spatrick assert(!FallthroughIsBranchThrough);
977e5dd7070Spatrick EmitBlock(FallthroughDest);
978e5dd7070Spatrick
979e5dd7070Spatrick // Case 3: a fallthrough source exists and should branch to the
980e5dd7070Spatrick // cleanup and then through to the next.
981e5dd7070Spatrick } else if (HasFallthrough) {
982e5dd7070Spatrick // Everything is already set up for this.
983e5dd7070Spatrick
984e5dd7070Spatrick // Case 4: no fallthrough source exists.
985e5dd7070Spatrick } else {
986e5dd7070Spatrick Builder.ClearInsertionPoint();
987e5dd7070Spatrick }
988e5dd7070Spatrick
989e5dd7070Spatrick // VI. Assorted cleaning.
990e5dd7070Spatrick
991e5dd7070Spatrick // Check whether we can merge NormalEntry into a single predecessor.
992e5dd7070Spatrick // This might invalidate (non-IR) pointers to NormalEntry.
993e5dd7070Spatrick llvm::BasicBlock *NewNormalEntry =
994e5dd7070Spatrick SimplifyCleanupEntry(*this, NormalEntry);
995e5dd7070Spatrick
996e5dd7070Spatrick // If it did invalidate those pointers, and NormalEntry was the same
997e5dd7070Spatrick // as NormalExit, go back and patch up the fixups.
998e5dd7070Spatrick if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
999e5dd7070Spatrick for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
1000e5dd7070Spatrick I < E; ++I)
1001e5dd7070Spatrick EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
1002e5dd7070Spatrick }
1003e5dd7070Spatrick }
1004e5dd7070Spatrick
1005e5dd7070Spatrick assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);
1006e5dd7070Spatrick
1007e5dd7070Spatrick // Emit the EH cleanup if required.
1008e5dd7070Spatrick if (RequiresEHCleanup) {
1009e5dd7070Spatrick CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
1010e5dd7070Spatrick
1011e5dd7070Spatrick EmitBlock(EHEntry);
1012e5dd7070Spatrick
1013e5dd7070Spatrick llvm::BasicBlock *NextAction = getEHDispatchBlock(EHParent);
1014e5dd7070Spatrick
1015e5dd7070Spatrick // Push a terminate scope or cleanupendpad scope around the potentially
1016e5dd7070Spatrick // throwing cleanups. For funclet EH personalities, the cleanupendpad models
1017e5dd7070Spatrick // program termination when cleanups throw.
1018e5dd7070Spatrick bool PushedTerminate = false;
1019*12c85518Srobert SaveAndRestore RestoreCurrentFuncletPad(CurrentFuncletPad);
1020e5dd7070Spatrick llvm::CleanupPadInst *CPI = nullptr;
1021e5dd7070Spatrick
1022e5dd7070Spatrick const EHPersonality &Personality = EHPersonality::get(*this);
1023e5dd7070Spatrick if (Personality.usesFuncletPads()) {
1024e5dd7070Spatrick llvm::Value *ParentPad = CurrentFuncletPad;
1025e5dd7070Spatrick if (!ParentPad)
1026e5dd7070Spatrick ParentPad = llvm::ConstantTokenNone::get(CGM.getLLVMContext());
1027e5dd7070Spatrick CurrentFuncletPad = CPI = Builder.CreateCleanupPad(ParentPad);
1028e5dd7070Spatrick }
1029e5dd7070Spatrick
1030e5dd7070Spatrick // Non-MSVC personalities need to terminate when an EH cleanup throws.
1031e5dd7070Spatrick if (!Personality.isMSVCPersonality()) {
1032e5dd7070Spatrick EHStack.pushTerminate();
1033e5dd7070Spatrick PushedTerminate = true;
1034e5dd7070Spatrick }
1035e5dd7070Spatrick
1036e5dd7070Spatrick // We only actually emit the cleanup code if the cleanup is either
1037e5dd7070Spatrick // active or was used before it was deactivated.
1038e5dd7070Spatrick if (EHActiveFlag.isValid() || IsActive) {
1039e5dd7070Spatrick cleanupFlags.setIsForEHCleanup();
1040e5dd7070Spatrick EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
1041e5dd7070Spatrick }
1042e5dd7070Spatrick
1043e5dd7070Spatrick if (CPI)
1044e5dd7070Spatrick Builder.CreateCleanupRet(CPI, NextAction);
1045e5dd7070Spatrick else
1046e5dd7070Spatrick Builder.CreateBr(NextAction);
1047e5dd7070Spatrick
1048e5dd7070Spatrick // Leave the terminate scope.
1049e5dd7070Spatrick if (PushedTerminate)
1050e5dd7070Spatrick EHStack.popTerminate();
1051e5dd7070Spatrick
1052e5dd7070Spatrick Builder.restoreIP(SavedIP);
1053e5dd7070Spatrick
1054e5dd7070Spatrick SimplifyCleanupEntry(*this, EHEntry);
1055e5dd7070Spatrick }
1056e5dd7070Spatrick }
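
// Illustrative sketch of the EH path emitted above for a funclet personality
// (hypothetical names; schematic rather than exact Clang output):
//
//   ehcleanup:                                          ; EHEntry
//     %cpad = cleanuppad within %parentpad []
//     call void @some.dtor(ptr %obj) [ "funclet"(token %cpad) ]
//     cleanupret from %cpad unwind label %next.action   ; getEHDispatchBlock
//
// With a landingpad-style personality there is no cleanuppad; the block ends
// in a plain 'br label %next.action', and a terminate scope guards the
// potentially-throwing cleanup body instead.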
1057e5dd7070Spatrick
1058e5dd7070Spatrick /// isObviouslyBranchWithoutCleanups - Return true if a branch to the
1059e5dd7070Spatrick /// specified destination obviously has no cleanups to run. 'false' is always
1060e5dd7070Spatrick /// a conservatively correct answer for this method.
isObviouslyBranchWithoutCleanups(JumpDest Dest) const1061e5dd7070Spatrick bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
1062e5dd7070Spatrick assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
1063e5dd7070Spatrick && "stale jump destination");
1064e5dd7070Spatrick
1065e5dd7070Spatrick // Calculate the innermost active normal cleanup.
1066e5dd7070Spatrick EHScopeStack::stable_iterator TopCleanup =
1067e5dd7070Spatrick EHStack.getInnermostActiveNormalCleanup();
1068e5dd7070Spatrick
1069e5dd7070Spatrick // If we're not in an active normal cleanup scope, or if the
1070e5dd7070Spatrick // destination scope is within the innermost active normal cleanup
1071e5dd7070Spatrick // scope, we don't need to worry about fixups.
1072e5dd7070Spatrick if (TopCleanup == EHStack.stable_end() ||
1073e5dd7070Spatrick TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
1074e5dd7070Spatrick return true;
1075e5dd7070Spatrick
1076e5dd7070Spatrick // Otherwise, we might need some cleanups.
1077e5dd7070Spatrick return false;
1078e5dd7070Spatrick }
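
// For example (hypothetical source): in
//
//   while (more()) { if (skip()) continue; step(); }
//
// the 'continue' leaves no scope that owns a cleanup, so this predicate
// returns true and callers can emit a direct branch with no fixup machinery.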
1079e5dd7070Spatrick
1080e5dd7070Spatrick
1081e5dd7070Spatrick /// Terminate the current block by emitting a branch which might leave
1082e5dd7070Spatrick /// the current cleanup-protected scope. The target scope may not yet
1083e5dd7070Spatrick /// be known, in which case this will require a fixup.
1084e5dd7070Spatrick ///
1085e5dd7070Spatrick /// As a side-effect, this method clears the insertion point.
EmitBranchThroughCleanup(JumpDest Dest)1086e5dd7070Spatrick void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
1087e5dd7070Spatrick assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
1088e5dd7070Spatrick && "stale jump destination");
1089e5dd7070Spatrick
1090e5dd7070Spatrick if (!HaveInsertPoint())
1091e5dd7070Spatrick return;
1092e5dd7070Spatrick
1093e5dd7070Spatrick // Create the branch.
1094e5dd7070Spatrick llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());
1095e5dd7070Spatrick
1096e5dd7070Spatrick // Calculate the innermost active normal cleanup.
1097e5dd7070Spatrick EHScopeStack::stable_iterator
1098e5dd7070Spatrick TopCleanup = EHStack.getInnermostActiveNormalCleanup();
1099e5dd7070Spatrick
1100e5dd7070Spatrick // If we're not in an active normal cleanup scope, or if the
1101e5dd7070Spatrick // destination scope is within the innermost active normal cleanup
1102e5dd7070Spatrick // scope, we don't need to worry about fixups.
1103e5dd7070Spatrick if (TopCleanup == EHStack.stable_end() ||
1104e5dd7070Spatrick TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
1105e5dd7070Spatrick Builder.ClearInsertionPoint();
1106e5dd7070Spatrick return;
1107e5dd7070Spatrick }
1108e5dd7070Spatrick
1109e5dd7070Spatrick // If we can't resolve the destination cleanup scope, just add this
1110e5dd7070Spatrick // to the current cleanup scope as a branch fixup.
1111e5dd7070Spatrick if (!Dest.getScopeDepth().isValid()) {
1112e5dd7070Spatrick BranchFixup &Fixup = EHStack.addBranchFixup();
1113e5dd7070Spatrick Fixup.Destination = Dest.getBlock();
1114e5dd7070Spatrick Fixup.DestinationIndex = Dest.getDestIndex();
1115e5dd7070Spatrick Fixup.InitialBranch = BI;
1116e5dd7070Spatrick Fixup.OptimisticBranchBlock = nullptr;
1117e5dd7070Spatrick
1118e5dd7070Spatrick Builder.ClearInsertionPoint();
1119e5dd7070Spatrick return;
1120e5dd7070Spatrick }
1121e5dd7070Spatrick
1122e5dd7070Spatrick // Otherwise, thread through all the normal cleanups in scope.
1123e5dd7070Spatrick
1124e5dd7070Spatrick // Store the index at the start.
1125e5dd7070Spatrick llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
1126e5dd7070Spatrick createStoreInstBefore(Index, getNormalCleanupDestSlot(), BI);
1127e5dd7070Spatrick
1128e5dd7070Spatrick // Adjust BI to point to the first cleanup block.
1129e5dd7070Spatrick {
1130e5dd7070Spatrick EHCleanupScope &Scope =
1131e5dd7070Spatrick cast<EHCleanupScope>(*EHStack.find(TopCleanup));
1132e5dd7070Spatrick BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
1133e5dd7070Spatrick }
1134e5dd7070Spatrick
1135e5dd7070Spatrick // Add this destination to all the scopes involved.
1136e5dd7070Spatrick EHScopeStack::stable_iterator I = TopCleanup;
1137e5dd7070Spatrick EHScopeStack::stable_iterator E = Dest.getScopeDepth();
1138e5dd7070Spatrick if (E.strictlyEncloses(I)) {
1139e5dd7070Spatrick while (true) {
1140e5dd7070Spatrick EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
1141e5dd7070Spatrick assert(Scope.isNormalCleanup());
1142e5dd7070Spatrick I = Scope.getEnclosingNormalCleanup();
1143e5dd7070Spatrick
1144e5dd7070Spatrick // If this is the last cleanup we're propagating through, tell it
1145e5dd7070Spatrick // that there's a resolved jump moving through it.
1146e5dd7070Spatrick if (!E.strictlyEncloses(I)) {
1147e5dd7070Spatrick Scope.addBranchAfter(Index, Dest.getBlock());
1148e5dd7070Spatrick break;
1149e5dd7070Spatrick }
1150e5dd7070Spatrick
1151e5dd7070Spatrick // Otherwise, tell the scope that there's a jump propagating
1152e5dd7070Spatrick // through it. If this isn't new information, all the rest of
1153e5dd7070Spatrick // the work has been done before.
1154e5dd7070Spatrick if (!Scope.addBranchThrough(Dest.getBlock()))
1155e5dd7070Spatrick break;
1156e5dd7070Spatrick }
1157e5dd7070Spatrick }
1158e5dd7070Spatrick
1159e5dd7070Spatrick Builder.ClearInsertionPoint();
1160e5dd7070Spatrick }
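
// Illustrative sketch (hypothetical source; schematic IR, the index and the
// names are made up): for
//
//   struct A { ~A(); };
//   void f(bool b) { A a; if (b) return; use(a); }
//
// the 'return' must first run a's destructor, so the code above stores the
// destination index into the cleanup dest slot and retargets BI at the
// cleanup's entry; popping the cleanup later appends the dispatch:
//
//   store i32 1, ptr %cleanup.dest.slot
//   br label %cleanup                        ; BI, retargeted
//   ...
// cleanup:
//   call void @A.dtor(ptr %a)
//   %dest = load i32, ptr %cleanup.dest.slot
//   switch i32 %dest, label %cleanup.cont [ i32 1, label %return ]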
1161e5dd7070Spatrick
IsUsedAsNormalCleanup(EHScopeStack & EHStack,EHScopeStack::stable_iterator C)1162e5dd7070Spatrick static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
1163e5dd7070Spatrick EHScopeStack::stable_iterator C) {
1164e5dd7070Spatrick // If we needed a normal block for any reason, that counts.
1165e5dd7070Spatrick if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
1166e5dd7070Spatrick return true;
1167e5dd7070Spatrick
1168e5dd7070Spatrick // Check whether any enclosed cleanups were needed; a branch that uses an inner cleanup also threads through this one.
1169e5dd7070Spatrick for (EHScopeStack::stable_iterator
1170e5dd7070Spatrick I = EHStack.getInnermostNormalCleanup();
1171e5dd7070Spatrick I != C; ) {
1172e5dd7070Spatrick assert(C.strictlyEncloses(I));
1173e5dd7070Spatrick EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
1174e5dd7070Spatrick if (S.getNormalBlock()) return true;
1175e5dd7070Spatrick I = S.getEnclosingNormalCleanup();
1176e5dd7070Spatrick }
1177e5dd7070Spatrick
1178e5dd7070Spatrick return false;
1179e5dd7070Spatrick }
1180e5dd7070Spatrick
IsUsedAsEHCleanup(EHScopeStack & EHStack,EHScopeStack::stable_iterator cleanup)1181e5dd7070Spatrick static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
1182e5dd7070Spatrick EHScopeStack::stable_iterator cleanup) {
1183e5dd7070Spatrick // If we needed an EH block for any reason, that counts.
1184e5dd7070Spatrick if (EHStack.find(cleanup)->hasEHBranches())
1185e5dd7070Spatrick return true;
1186e5dd7070Spatrick
1187e5dd7070Spatrick // Check whether any enclosed EH scopes were needed; unwinding through an inner scope also reaches this cleanup.
1188e5dd7070Spatrick for (EHScopeStack::stable_iterator
1189e5dd7070Spatrick i = EHStack.getInnermostEHScope(); i != cleanup; ) {
1190e5dd7070Spatrick assert(cleanup.strictlyEncloses(i));
1191e5dd7070Spatrick
1192e5dd7070Spatrick EHScope &scope = *EHStack.find(i);
1193e5dd7070Spatrick if (scope.hasEHBranches())
1194e5dd7070Spatrick return true;
1195e5dd7070Spatrick
1196e5dd7070Spatrick i = scope.getEnclosingEHScope();
1197e5dd7070Spatrick }
1198e5dd7070Spatrick
1199e5dd7070Spatrick return false;
1200e5dd7070Spatrick }
1201e5dd7070Spatrick
1202e5dd7070Spatrick enum ForActivation_t {
1203e5dd7070Spatrick ForActivation,
1204e5dd7070Spatrick ForDeactivation
1205e5dd7070Spatrick };
1206e5dd7070Spatrick
1207e5dd7070Spatrick /// The given cleanup block is changing activation state. Configure a
1208e5dd7070Spatrick /// cleanup variable if necessary.
1209e5dd7070Spatrick ///
1210e5dd7070Spatrick /// It would be good if we had some way of determining if there were
1211e5dd7070Spatrick /// extra uses *after* the change-over point.
SetupCleanupBlockActivation(CodeGenFunction & CGF,EHScopeStack::stable_iterator C,ForActivation_t kind,llvm::Instruction * dominatingIP)1212e5dd7070Spatrick static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
1213e5dd7070Spatrick EHScopeStack::stable_iterator C,
1214e5dd7070Spatrick ForActivation_t kind,
1215e5dd7070Spatrick llvm::Instruction *dominatingIP) {
1216e5dd7070Spatrick EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));
1217e5dd7070Spatrick
1218e5dd7070Spatrick // We always need the flag if we're activating the cleanup in a
1219e5dd7070Spatrick // conditional context, because we have to assume that the current
1220e5dd7070Spatrick // location doesn't necessarily dominate the cleanup's code.
1221e5dd7070Spatrick bool isActivatedInConditional =
1222e5dd7070Spatrick (kind == ForActivation && CGF.isInConditionalBranch());
1223e5dd7070Spatrick
1224e5dd7070Spatrick bool needFlag = false;
1225e5dd7070Spatrick
1226e5dd7070Spatrick // Calculate whether the cleanup was used:
1227e5dd7070Spatrick
1228e5dd7070Spatrick // - as a normal cleanup
1229e5dd7070Spatrick if (Scope.isNormalCleanup() &&
1230e5dd7070Spatrick (isActivatedInConditional || IsUsedAsNormalCleanup(CGF.EHStack, C))) {
1231e5dd7070Spatrick Scope.setTestFlagInNormalCleanup();
1232e5dd7070Spatrick needFlag = true;
1233e5dd7070Spatrick }
1234e5dd7070Spatrick
1235e5dd7070Spatrick // - as an EH cleanup
1236e5dd7070Spatrick if (Scope.isEHCleanup() &&
1237e5dd7070Spatrick (isActivatedInConditional || IsUsedAsEHCleanup(CGF.EHStack, C))) {
1238e5dd7070Spatrick Scope.setTestFlagInEHCleanup();
1239e5dd7070Spatrick needFlag = true;
1240e5dd7070Spatrick }
1241e5dd7070Spatrick
1242e5dd7070Spatrick // If it hasn't yet been used as either, we're done.
1243e5dd7070Spatrick if (!needFlag) return;
1244e5dd7070Spatrick
1245e5dd7070Spatrick Address var = Scope.getActiveFlag();
1246e5dd7070Spatrick if (!var.isValid()) {
1247e5dd7070Spatrick var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), CharUnits::One(),
1248e5dd7070Spatrick "cleanup.isactive");
1249e5dd7070Spatrick Scope.setActiveFlag(var);
1250e5dd7070Spatrick
1251e5dd7070Spatrick assert(dominatingIP && "no existing variable and no dominating IP!");
1252e5dd7070Spatrick
1253e5dd7070Spatrick // Initialize to true or false depending on whether it was
1254e5dd7070Spatrick // active up to this point.
1255e5dd7070Spatrick llvm::Constant *value = CGF.Builder.getInt1(kind == ForDeactivation);
1256e5dd7070Spatrick
1257e5dd7070Spatrick // If we're in a conditional block, ignore the dominating IP and
1258e5dd7070Spatrick // use the outermost conditional branch.
1259e5dd7070Spatrick if (CGF.isInConditionalBranch()) {
1260e5dd7070Spatrick CGF.setBeforeOutermostConditional(value, var);
1261e5dd7070Spatrick } else {
1262e5dd7070Spatrick createStoreInstBefore(value, var, dominatingIP);
1263e5dd7070Spatrick }
1264e5dd7070Spatrick }
1265e5dd7070Spatrick
1266e5dd7070Spatrick CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
1267e5dd7070Spatrick }
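
// Illustrative sketch of the resulting flag protocol: deactivating a cleanup
// that has already been used emits, reusing the names above,
//
//   ; at dominatingIP (or before the outermost conditional branch):
//   store i1 true, ptr %cleanup.isactive     ; it was active up to this point
//   ...
//   ; at the current insertion point:
//   store i1 false, ptr %cleanup.isactive    ; the deactivation itself
//
// and the cleanup body, when emitted, is guarded by a load of this flag.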
1268e5dd7070Spatrick
1269e5dd7070Spatrick /// Activate a cleanup that was created in an inactivated state.
ActivateCleanupBlock(EHScopeStack::stable_iterator C,llvm::Instruction * dominatingIP)1270e5dd7070Spatrick void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
1271e5dd7070Spatrick llvm::Instruction *dominatingIP) {
1272e5dd7070Spatrick assert(C != EHStack.stable_end() && "activating bottom of stack?");
1273e5dd7070Spatrick EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
1274e5dd7070Spatrick assert(!Scope.isActive() && "double activation");
1275e5dd7070Spatrick
1276e5dd7070Spatrick SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);
1277e5dd7070Spatrick
1278e5dd7070Spatrick Scope.setActive(true);
1279e5dd7070Spatrick }
1280e5dd7070Spatrick
1281e5dd7070Spatrick /// Deactivate a cleanup that was created in an active state.
DeactivateCleanupBlock(EHScopeStack::stable_iterator C,llvm::Instruction * dominatingIP)1282e5dd7070Spatrick void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
1283e5dd7070Spatrick llvm::Instruction *dominatingIP) {
1284e5dd7070Spatrick assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
1285e5dd7070Spatrick EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
1286e5dd7070Spatrick assert(Scope.isActive() && "double deactivation");
1287e5dd7070Spatrick
1288e5dd7070Spatrick // If it's the top of the stack, just pop it, but do so only if it belongs
1289e5dd7070Spatrick // to the current RunCleanupsScope.
1290e5dd7070Spatrick if (C == EHStack.stable_begin() &&
1291e5dd7070Spatrick CurrentCleanupScopeDepth.strictlyEncloses(C)) {
1292a9ac8606Spatrick // Checking EHAsynch here is not strictly necessary; it is done to
1293a9ac8606Spatrick // guarantee zero impact when the EHAsynch (-EHa) option is off.
1294a9ac8606Spatrick if (!Scope.isNormalCleanup() && getLangOpts().EHAsynch) {
1295a9ac8606Spatrick PopCleanupBlock();
1296a9ac8606Spatrick } else {
1297e5dd7070Spatrick // If it's a normal cleanup, we need to pretend that the
1298e5dd7070Spatrick // fallthrough is unreachable.
1299e5dd7070Spatrick CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
1300e5dd7070Spatrick PopCleanupBlock();
1301e5dd7070Spatrick Builder.restoreIP(SavedIP);
1302a9ac8606Spatrick }
1303e5dd7070Spatrick return;
1304e5dd7070Spatrick }
1305e5dd7070Spatrick
1306e5dd7070Spatrick // Otherwise, follow the general case.
1307e5dd7070Spatrick SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);
1308e5dd7070Spatrick
1309e5dd7070Spatrick Scope.setActive(false);
1310e5dd7070Spatrick }
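
// Illustrative use of deactivation (simplified from the real callers): a
// 'new' expression pushes a conditional delete cleanup before running the
// constructor and deactivates it once construction succeeds:
//
//   push EH cleanup: operator delete(%mem);
//   emit the constructor call;        // if it throws, the cleanup frees %mem
//   DeactivateCleanupBlock(C, IP);    // success: the delete must not run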
1311e5dd7070Spatrick
getNormalCleanupDestSlot()1312e5dd7070Spatrick Address CodeGenFunction::getNormalCleanupDestSlot() {
1313e5dd7070Spatrick if (!NormalCleanupDest.isValid())
1314e5dd7070Spatrick NormalCleanupDest =
1315e5dd7070Spatrick CreateDefaultAlignTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
1316e5dd7070Spatrick return NormalCleanupDest;
1317e5dd7070Spatrick }
1318e5dd7070Spatrick
1319e5dd7070Spatrick /// Emits all the code to cause the given temporary to be cleaned up.
EmitCXXTemporary(const CXXTemporary * Temporary,QualType TempType,Address Ptr)1320e5dd7070Spatrick void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
1321e5dd7070Spatrick QualType TempType,
1322e5dd7070Spatrick Address Ptr) {
1323e5dd7070Spatrick pushDestroy(NormalAndEHCleanup, Ptr, TempType, destroyCXXObject,
1324e5dd7070Spatrick /*useEHCleanup*/ true);
1325e5dd7070Spatrick }
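
// For example (hypothetical source): in 'f(A());' the temporary A() must be
// destroyed at the end of the full-expression on both the normal path and the
// unwind path, which is why a NormalAndEHCleanup is pushed here.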
1326a9ac8606Spatrick
1327a9ac8606Spatrick // The "funclet" operand bundle needs to be set up properly for the nothrow
1328a9ac8606Spatrick // intrinsics (see CGCall.cpp).
EmitSehScope(CodeGenFunction & CGF,llvm::FunctionCallee & SehCppScope)1329a9ac8606Spatrick static void EmitSehScope(CodeGenFunction &CGF,
1330a9ac8606Spatrick llvm::FunctionCallee &SehCppScope) {
1331a9ac8606Spatrick llvm::BasicBlock *InvokeDest = CGF.getInvokeDest();
1332a9ac8606Spatrick assert(CGF.Builder.GetInsertBlock() && InvokeDest);
1333a9ac8606Spatrick llvm::BasicBlock *Cont = CGF.createBasicBlock("invoke.cont");
1334a9ac8606Spatrick SmallVector<llvm::OperandBundleDef, 1> BundleList =
1335a9ac8606Spatrick CGF.getBundlesForFunclet(SehCppScope.getCallee());
1336a9ac8606Spatrick if (CGF.CurrentFuncletPad)
1337a9ac8606Spatrick BundleList.emplace_back("funclet", CGF.CurrentFuncletPad);
1338*12c85518Srobert CGF.Builder.CreateInvoke(SehCppScope, Cont, InvokeDest, std::nullopt,
1339*12c85518Srobert BundleList);
1340a9ac8606Spatrick CGF.EmitBlock(Cont);
1341a9ac8606Spatrick }
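
// Schematically, the marker emitted above (names hypothetical) is
//
//   invoke void @llvm.seh.scope.begin() [ "funclet"(token %pad) ]
//           to label %invoke.cont unwind label %ehcleanup
//
// i.e. an invoke, so that the -EHa scope markers participate in the same
// unwind structure as ordinary calls; the funclet bundle is attached only
// when we are already inside a funclet pad.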
1342a9ac8606Spatrick
1343a9ac8606Spatrick // Invoke llvm.seh.scope.begin at the beginning of a C++ scope for -EHa.
EmitSehCppScopeBegin()1344a9ac8606Spatrick void CodeGenFunction::EmitSehCppScopeBegin() {
1345a9ac8606Spatrick assert(getLangOpts().EHAsynch);
1346a9ac8606Spatrick llvm::FunctionType *FTy =
1347a9ac8606Spatrick llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
1348a9ac8606Spatrick llvm::FunctionCallee SehCppScope =
1349a9ac8606Spatrick CGM.CreateRuntimeFunction(FTy, "llvm.seh.scope.begin");
1350a9ac8606Spatrick EmitSehScope(*this, SehCppScope);
1351a9ac8606Spatrick }
1352a9ac8606Spatrick
1353a9ac8606Spatrick // Invoke llvm.seh.scope.end at the end of a C++ scope for -EHa. It is emitted
1354a9ac8606Spatrick // before popCleanup, so it is "invoked" while the scope's EH cleanup is live.
EmitSehCppScopeEnd()1355a9ac8606Spatrick void CodeGenFunction::EmitSehCppScopeEnd() {
1356a9ac8606Spatrick assert(getLangOpts().EHAsynch);
1357a9ac8606Spatrick llvm::FunctionType *FTy =
1358a9ac8606Spatrick llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
1359a9ac8606Spatrick llvm::FunctionCallee SehCppScope =
1360a9ac8606Spatrick CGM.CreateRuntimeFunction(FTy, "llvm.seh.scope.end");
1361a9ac8606Spatrick EmitSehScope(*this, SehCppScope);
1362a9ac8606Spatrick }
1363a9ac8606Spatrick
1364a9ac8606Spatrick // Invoke llvm.seh.try.begin at the beginning of an SEH scope for -EHa.
EmitSehTryScopeBegin()1365a9ac8606Spatrick void CodeGenFunction::EmitSehTryScopeBegin() {
1366a9ac8606Spatrick assert(getLangOpts().EHAsynch);
1367a9ac8606Spatrick llvm::FunctionType *FTy =
1368a9ac8606Spatrick llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
1369a9ac8606Spatrick llvm::FunctionCallee SehCppScope =
1370a9ac8606Spatrick CGM.CreateRuntimeFunction(FTy, "llvm.seh.try.begin");
1371a9ac8606Spatrick EmitSehScope(*this, SehCppScope);
1372a9ac8606Spatrick }
1373a9ac8606Spatrick
1374a9ac8606Spatrick // Invoke llvm.seh.try.end at the end of an SEH scope for -EHa.
EmitSehTryScopeEnd()1375a9ac8606Spatrick void CodeGenFunction::EmitSehTryScopeEnd() {
1376a9ac8606Spatrick assert(getLangOpts().EHAsynch);
1377a9ac8606Spatrick llvm::FunctionType *FTy =
1378a9ac8606Spatrick llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
1379a9ac8606Spatrick llvm::FunctionCallee SehCppScope =
1380a9ac8606Spatrick CGM.CreateRuntimeFunction(FTy, "llvm.seh.try.end");
1381a9ac8606Spatrick EmitSehScope(*this, SehCppScope);
1382a9ac8606Spatrick }
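
// Putting the -EHa pieces together (illustrative): for
//
//   { A a; *p = 0; }   // '*p = 0' may fault asynchronously
//
// CodeGen brackets the region where 'a' is alive with llvm.seh.scope.begin /
// llvm.seh.scope.end, so that a hardware fault inside the scope unwinds
// through a's destructor cleanup.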