//===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains code dealing with the IR generation for cleanups
// and related information.
//
// A "cleanup" is a piece of code which needs to be executed whenever
// control transfers out of a particular scope.  This can be
// conditionalized to occur only on exceptional control flow, only on
// normal control flow, or both.
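//
// For illustration, a hypothetical example (not taken from any test):
//
//   void f(bool b) {
//     std::string s;   // pushes a cleanup to destroy 's'
//     if (b) return;   // the cleanup runs on this early exit,
//     mayThrow();      // on the exceptional edge out of this call,
//   }                  // and on normal fallthrough off the end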
//
//===----------------------------------------------------------------------===//

#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace CodeGen;

bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
  if (rv.isScalar())
    return DominatingLLVMValue::needsSaving(rv.getScalarVal());
  if (rv.isAggregate())
    return DominatingValue<Address>::needsSaving(rv.getAggregateAddress());
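  // Complex r-values are conservatively treated as always needing to be
  // saved; save() below saves their two components separately.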
  return true;
}

DominatingValue<RValue>::saved_type
DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
  if (rv.isScalar()) {
    llvm::Value *V = rv.getScalarVal();
    return saved_type(DominatingLLVMValue::save(CGF, V),
                      DominatingLLVMValue::needsSaving(V) ? ScalarAddress
                                                          : ScalarLiteral);
  }

  if (rv.isComplex()) {
    CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
    return saved_type(DominatingLLVMValue::save(CGF, V.first),
                      DominatingLLVMValue::save(CGF, V.second));
  }

  assert(rv.isAggregate());
  Address V = rv.getAggregateAddress();
  return saved_type(DominatingValue<Address>::save(CGF, V),
                    DominatingValue<Address>::needsSaving(V)
                        ? AggregateAddress
                        : AggregateLiteral);
}

/// Given a saved r-value produced by save(), emit the code
/// necessary to restore it to usability at the current insertion
/// point.
RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
  switch (K) {
  case ScalarLiteral:
  case ScalarAddress:
    return RValue::get(DominatingLLVMValue::restore(CGF, Vals.first));
  case AggregateLiteral:
  case AggregateAddress:
    return RValue::getAggregate(
        DominatingValue<Address>::restore(CGF, AggregateAddr));
  case ComplexAddress: {
    llvm::Value *real = DominatingLLVMValue::restore(CGF, Vals.first);
    llvm::Value *imag = DominatingLLVMValue::restore(CGF, Vals.second);
    return RValue::getComplex(real, imag);
  }
  }

  llvm_unreachable("bad saved r-value kind");
}

/// Push an entry of the given size onto this protected-scope stack.
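/// The buffer grows downward: entries are allocated by moving StartOfData
/// toward StartOfBuffer, so the innermost scope always sits at StartOfData.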
char *EHScopeStack::allocate(size_t Size) {
  Size = llvm::alignTo(Size, ScopeStackAlignment);
  if (!StartOfBuffer) {
    unsigned Capacity = 1024;
    while (Capacity < Size) Capacity *= 2;
    StartOfBuffer = new char[Capacity];
    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

    unsigned NewCapacity = CurrentCapacity;
    do {
      NewCapacity *= 2;
    } while (NewCapacity < UsedCapacity + Size);

    char *NewStartOfBuffer = new char[NewCapacity];
    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
    memcpy(NewStartOfData, StartOfData, UsedCapacity);
    delete[] StartOfBuffer;
    StartOfBuffer = NewStartOfBuffer;
    EndOfBuffer = NewEndOfBuffer;
    StartOfData = NewStartOfData;
  }

  assert(StartOfBuffer + Size <= StartOfData);
  StartOfData -= Size;
  return StartOfData;
}

void EHScopeStack::deallocate(size_t Size) {
  StartOfData += llvm::alignTo(Size, ScopeStackAlignment);
}

bool EHScopeStack::containsOnlyNoopCleanups(
    EHScopeStack::stable_iterator Old) const {
  for (EHScopeStack::iterator it = begin(); stabilize(it) != Old; ++it) {
    EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*it);
    // If this is anything other than a lifetime marker or fake use cleanup,
    // then the scope stack does not contain only noop cleanups.
    if (!cleanup)
      return false;
    if (!cleanup->isLifetimeMarker() && !cleanup->isFakeUse())
      return false;
  }

  return true;
}

bool EHScopeStack::requiresLandingPad() const {
  for (stable_iterator si = getInnermostEHScope(); si != stable_end(); ) {
    // Skip lifetime markers.
    if (auto *cleanup = dyn_cast<EHCleanupScope>(&*find(si)))
      if (cleanup->isLifetimeMarker()) {
        si = cleanup->getEnclosingEHScope();
        continue;
      }
    return true;
  }

  return false;
}

EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
         si != se; ) {
    EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
    if (cleanup.isActive()) return si;
    si = cleanup.getEnclosingNormalCleanup();
  }
  return stable_end();
}

void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
  bool IsNormalCleanup = Kind & NormalCleanup;
  bool IsEHCleanup = Kind & EHCleanup;
  bool IsLifetimeMarker = Kind & LifetimeMarker;
  bool IsFakeUse = Kind & FakeUse;

  // Per C++ [except.terminate], it is implementation-defined whether none,
  // some, or all cleanups are called before std::terminate. Thus, when
  // terminate is the current EH scope, we may skip adding any EH cleanup
  // scopes.
  if (InnermostEHScope != stable_end() &&
      find(InnermostEHScope)->getKind() == EHScope::Terminate)
    IsEHCleanup = false;

  EHCleanupScope *Scope =
    new (Buffer) EHCleanupScope(IsNormalCleanup,
                                IsEHCleanup,
                                Size,
                                BranchFixups.size(),
                                InnermostNormalCleanup,
                                InnermostEHScope);
  if (IsNormalCleanup)
    InnermostNormalCleanup = stable_begin();
  if (IsEHCleanup)
    InnermostEHScope = stable_begin();
  if (IsLifetimeMarker)
    Scope->setLifetimeMarker();
  if (IsFakeUse)
    Scope->setFakeUse();

  // With Windows -EHa, invoke llvm.seh.scope.begin() for EH cleanups.
  // If exceptions are disabled/ignored and SEH is not in use, then there is no
  // invoke destination. SEH "works" even if exceptions are off. In practice,
  // this means that C++ destructors and other EH cleanups don't run, which is
  // consistent with MSVC's behavior, except in the presence of -EHa.
  // Check getInvokeDest() to generate llvm.seh.scope.begin() only as needed.
  if (CGF->getLangOpts().EHAsynch && IsEHCleanup && !IsLifetimeMarker &&
      CGF->getTarget().getCXXABI().isMicrosoft() && CGF->getInvokeDest())
    CGF->EmitSehCppScopeBegin();

  return Scope->getCleanupBuffer();
}

void EHScopeStack::popCleanup() {
  assert(!empty() && "popping exception stack when not empty");

  assert(isa<EHCleanupScope>(*begin()));
  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
  InnermostEHScope = Cleanup.getEnclosingEHScope();
  deallocate(Cleanup.getAllocatedSize());

  // Destroy the cleanup.
  Cleanup.Destroy();

  // Check whether we can shrink the branch-fixups stack.
  if (!BranchFixups.empty()) {
    // If we no longer have any normal cleanups, all the fixups are
    // complete.
    if (!hasNormalCleanups())
      BranchFixups.clear();

    // Otherwise we can still trim out unnecessary nulls.
    else
      popNullFixups();
  }
}

EHFilterScope *EHScopeStack::pushFilter(unsigned numFilters) {
  assert(getInnermostEHScope() == stable_end());
  char *buffer = allocate(EHFilterScope::getSizeForNumFilters(numFilters));
  EHFilterScope *filter = new (buffer) EHFilterScope(numFilters);
  InnermostEHScope = stable_begin();
  return filter;
}

void EHScopeStack::popFilter() {
  assert(!empty() && "popping exception stack when not empty");

  EHFilterScope &filter = cast<EHFilterScope>(*begin());
  deallocate(EHFilterScope::getSizeForNumFilters(filter.getNumFilters()));

  InnermostEHScope = filter.getEnclosingEHScope();
}

EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) {
  char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(numHandlers));
  EHCatchScope *scope =
    new (buffer) EHCatchScope(numHandlers, InnermostEHScope);
  InnermostEHScope = stable_begin();
  return scope;
}

void EHScopeStack::pushTerminate() {
  char *Buffer = allocate(EHTerminateScope::getSize());
  new (Buffer) EHTerminateScope(InnermostEHScope);
  InnermostEHScope = stable_begin();
}

/// Remove any 'null' fixups on the stack.  However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place.  We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup;  otherwise there really shouldn't be any fixups.
  assert(hasNormalCleanups());

  EHScopeStack::iterator it = find(InnermostNormalCleanup);
  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

  while (BranchFixups.size() > MinSize &&
         BranchFixups.back().Destination == nullptr)
    BranchFixups.pop_back();
}

RawAddress CodeGenFunction::createCleanupActiveFlag() {
  // Create a variable to decide whether the cleanup needs to be run.
  RawAddress active = CreateTempAllocaWithoutCast(
      Builder.getInt1Ty(), CharUnits::One(), "cleanup.cond");

  // Initialize it to false at a site that's guaranteed to be run
  // before each evaluation.
  setBeforeOutermostConditional(Builder.getFalse(), active, *this);

  // Initialize it to true at the current location.
  Builder.CreateStore(Builder.getTrue(), active);

  return active;
}

void CodeGenFunction::initFullExprCleanupWithFlag(RawAddress ActiveFlag) {
  // Set that as the active flag in the cleanup.
  EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
  assert(!cleanup.hasActiveFlag() && "cleanup already has active flag?");
  cleanup.setActiveFlag(ActiveFlag);

  if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
  if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
}

void EHScopeStack::Cleanup::anchor() {}

static void createStoreInstBefore(llvm::Value *value, Address addr,
                                  llvm::BasicBlock::iterator beforeInst,
                                  CodeGenFunction &CGF) {
  auto store = new llvm::StoreInst(value, addr.emitRawPointer(CGF), beforeInst);
  store->setAlignment(addr.getAlignment().getAsAlign());
}

static llvm::LoadInst *
createLoadInstBefore(Address addr, const Twine &name,
                     llvm::BasicBlock::iterator beforeInst,
                     CodeGenFunction &CGF) {
  return new llvm::LoadInst(addr.getElementType(), addr.emitRawPointer(CGF),
                            name, false, addr.getAlignment().getAsAlign(),
                            beforeInst);
}

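// A variant that creates the load without inserting it into any block; the
// caller is responsible for placing it (see InstsToAppend in
// PopCleanupBlock).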
static llvm::LoadInst *createLoadInstBefore(Address addr, const Twine &name,
                                            CodeGenFunction &CGF) {
  return new llvm::LoadInst(addr.getElementType(), addr.emitRawPointer(CGF),
                            name, false, addr.getAlignment().getAsAlign());
}

/// All the branch fixups on the EH stack have propagated out past the
/// outermost normal cleanup; resolve them all by adding cases to the
/// given switch instruction.
static void ResolveAllBranchFixups(CodeGenFunction &CGF,
                                   llvm::SwitchInst *Switch,
                                   llvm::BasicBlock *CleanupEntry) {
  llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;

  for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination isn't set.
    BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
    if (Fixup.Destination == nullptr) continue;

    // If there isn't an OptimisticBranchBlock, then InitialBranch is
    // still pointing directly to its destination; forward it to the
    // appropriate cleanup entry.  This is required in the specific
    // case of
    //   { std::string s; goto lbl; }
    //   lbl:
    // i.e. where there's an unresolved fixup inside a single cleanup
    // entry which we're currently popping.
    if (Fixup.OptimisticBranchBlock == nullptr) {
      createStoreInstBefore(CGF.Builder.getInt32(Fixup.DestinationIndex),
                            CGF.getNormalCleanupDestSlot(),
                            Fixup.InitialBranch->getIterator(), CGF);
      Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
    }

    // Don't add this case to the switch statement twice.
    if (!CasesAdded.insert(Fixup.Destination).second)
      continue;

    Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
                    Fixup.Destination);
  }

  CGF.EHStack.clearFixups();
}

/// Transitions the terminator of the given exit-block of a cleanup to
/// be a cleanup switch.
static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
                                                   llvm::BasicBlock *Block) {
  // If it's a branch, turn it into a switch whose default
  // destination is its original target.
  llvm::Instruction *Term = Block->getTerminator();
  assert(Term && "can't transition block without terminator");

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional());
    auto Load = createLoadInstBefore(CGF.getNormalCleanupDestSlot(),
                                     "cleanup.dest", Term->getIterator(), CGF);
    llvm::SwitchInst *Switch =
      llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
    Br->eraseFromParent();
    return Switch;
  } else {
    return cast<llvm::SwitchInst>(Term);
  }
}

void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
  assert(Block && "resolving a null target block");
  if (!EHStack.getNumBranchFixups()) return;

  assert(EHStack.hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
  bool ResolvedAny = false;

  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination doesn't match.
    BranchFixup &Fixup = EHStack.getBranchFixup(I);
    if (Fixup.Destination != Block) continue;

    Fixup.Destination = nullptr;
    ResolvedAny = true;

    // If it doesn't have an optimistic branch block, InitialBranch is
    // already pointing to the right place.
    llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
    if (!BranchBB)
      continue;

    // Don't process the same optimistic branch block twice.
    if (!ModifiedOptimisticBlocks.insert(BranchBB).second)
      continue;

    llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);

    // Add a case to the switch.
    Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
  }

  if (ResolvedAny)
    EHStack.popNullFixups();
}

/// Pops cleanup blocks until the given savepoint is reached.
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  assert(Old.isValid());

  bool HadBranches = false;
  while (EHStack.stable_begin() != Old) {
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
    HadBranches |= Scope.hasBranches();

    // As long as Old strictly encloses the scope's enclosing normal
    // cleanup, we're going to emit another normal cleanup which
    // fallthrough can propagate through.
    bool FallThroughIsBranchThrough =
      Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());

    PopCleanupBlock(FallThroughIsBranchThrough);
  }

  // If we didn't have any branches, the insertion point before cleanups must
  // dominate the current insertion point and we don't need to reload any
  // values.
  if (!HadBranches)
    return;

  // Spill and reload all values that the caller wants to be live at the current
  // insertion point.
  for (llvm::Value **ReloadedValue : ValuesToReload) {
    auto *Inst = dyn_cast_or_null<llvm::Instruction>(*ReloadedValue);
    if (!Inst)
      continue;

    // Don't spill static allocas; they dominate all cleanups. These are
    // created by binding a reference to a local variable or temporary.
    auto *AI = dyn_cast<llvm::AllocaInst>(Inst);
    if (AI && AI->isStaticAlloca())
      continue;

    Address Tmp =
        CreateDefaultAlignTempAlloca(Inst->getType(), "tmp.exprcleanup");

    // Find an insertion point after Inst and spill it to the temporary.
    llvm::BasicBlock::iterator InsertBefore;
    if (auto *Invoke = dyn_cast<llvm::InvokeInst>(Inst))
      InsertBefore = Invoke->getNormalDest()->getFirstInsertionPt();
    else
      InsertBefore = std::next(Inst->getIterator());
    CGBuilderTy(CGM, &*InsertBefore).CreateStore(Inst, Tmp);

    // Reload the value at the current insertion point.
    *ReloadedValue = Builder.CreateLoad(Tmp);
  }
}

/// Pops cleanup blocks until the given savepoint is reached, then moves the
/// cleanups recorded past that savepoint on the lifetime-extended cleanups
/// stack onto the EH stack.
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old, size_t OldLifetimeExtendedSize,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  PopCleanupBlocks(Old, ValuesToReload);

  // Move our deferred cleanups onto the EH stack.
  for (size_t I = OldLifetimeExtendedSize,
              E = LifetimeExtendedCleanupStack.size(); I != E; /**/) {
    // Alignment should be guaranteed by the vptrs in the individual cleanups.
    assert((I % alignof(LifetimeExtendedCleanupHeader) == 0) &&
           "misaligned cleanup stack entry");

    LifetimeExtendedCleanupHeader &Header =
        reinterpret_cast<LifetimeExtendedCleanupHeader&>(
            LifetimeExtendedCleanupStack[I]);
    I += sizeof(Header);

    EHStack.pushCopyOfCleanup(Header.getKind(),
                              &LifetimeExtendedCleanupStack[I],
                              Header.getSize());
    I += Header.getSize();

    if (Header.isConditional()) {
      RawAddress ActiveFlag =
          reinterpret_cast<RawAddress &>(LifetimeExtendedCleanupStack[I]);
      initFullExprCleanupWithFlag(ActiveFlag);
      I += sizeof(ActiveFlag);
    }
  }
  LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
}

static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
                                           EHCleanupScope &Scope) {
  assert(Scope.isNormalCleanup());
  llvm::BasicBlock *Entry = Scope.getNormalBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("cleanup");
    Scope.setNormalBlock(Entry);
  }
  return Entry;
}

/// Attempts to reduce a cleanup's entry block to a fallthrough.  This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup blocks.
///
/// Returns the new block, whatever it is.
static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
                                              llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return Entry;

  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return Entry;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
  // block, we'll need to continue inserting at the end of the
  // predecessor.
  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

  // Kill the branch.
  Br->eraseFromParent();

  // Replace all uses of the entry with the predecessor, in case there
  // are phis in the cleanup.
  Entry->replaceAllUsesWith(Pred);

  // Merge the blocks.
  Pred->splice(Pred->end(), Entry);

  // Kill the entry block.
  Entry->eraseFromParent();

  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);

  return Pred;
}

static void EmitCleanup(CodeGenFunction &CGF,
                        EHScopeStack::Cleanup *Fn,
                        EHScopeStack::Cleanup::Flags flags,
                        Address ActiveFlag) {
  // If there's an active flag, load it and skip the cleanup if it's
  // false.
  llvm::BasicBlock *ContBB = nullptr;
  if (ActiveFlag.isValid()) {
    ContBB = CGF.createBasicBlock("cleanup.done");
    llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
    llvm::Value *IsActive
      = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
    CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
    CGF.EmitBlock(CleanupBB);
  }

  // Ask the cleanup to emit itself.
  Fn->Emit(CGF, flags);
  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");

  // Emit the continuation block if there was an active flag.
  if (ActiveFlag.isValid())
    CGF.EmitBlock(ContBB);
}

static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
                                          llvm::BasicBlock *From,
                                          llvm::BasicBlock *To) {
  // Exit is the exit block of a cleanup, so it always terminates in
  // an unconditional branch or a switch.
  llvm::Instruction *Term = Exit->getTerminator();

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
    Br->setSuccessor(0, To);
  } else {
    llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
    for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
      if (Switch->getSuccessor(I) == From)
        Switch->setSuccessor(I, To);
  }
}

/// We don't need a normal entry block for the given cleanup.
/// Optimistic fixup branches can cause such a block to come into
/// existence anyway;  if so, destroy it.
///
/// The validity of this transformation is very much specific to the
/// exact ways in which we form branches to cleanup entries.
static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
                                         EHCleanupScope &scope) {
  llvm::BasicBlock *entry = scope.getNormalBlock();
  if (!entry) return;

  // Replace all the uses with unreachable.
  llvm::BasicBlock *unreachableBB = CGF.getUnreachableBlock();
  for (llvm::BasicBlock::use_iterator
         i = entry->use_begin(), e = entry->use_end(); i != e; ) {
    llvm::Use &use = *i;
    ++i;

    use.set(unreachableBB);

    // The only uses should be fixup switches.
    llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
    if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
      // Replace the switch with a branch.
      llvm::BranchInst::Create(si->case_begin()->getCaseSuccessor(),
                               si->getIterator());

      // The switch operand is a load from the cleanup-dest alloca.
      llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());

      // Destroy the switch.
      si->eraseFromParent();

      // Destroy the load.
      assert(condition->getOperand(0) == CGF.NormalCleanupDest.getPointer());
      assert(condition->use_empty());
      condition->eraseFromParent();
    }
  }

  assert(entry->use_empty());
  delete entry;
}

/// Pops a cleanup block.  If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
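///
/// The interesting path below proceeds in the numbered phases marked in the
/// body: (I) set up the fallthrough edge in, (II) emit the entry block,
/// (III) build the cleanup epilogue, (IV) pop the cleanup and emit it,
/// (V) set up the fallthrough edge out, (VI) assorted cleaning.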
void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough,
                                      bool ForDeactivation) {
  assert(!EHStack.empty() && "cleanup stack is empty!");
  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());

  // If we are deactivating a normal cleanup, we need to pretend that the
  // fallthrough is unreachable. We restore this IP before returning.
  CGBuilderTy::InsertPoint NormalDeactivateOrigIP;
  if (ForDeactivation && (Scope.isNormalCleanup() || !getLangOpts().EHAsynch)) {
    NormalDeactivateOrigIP = Builder.saveAndClearIP();
  }
  // Remember activation information.
  bool IsActive = Scope.isActive();
  Address NormalActiveFlag =
    Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag()
                                          : Address::invalid();
  Address EHActiveFlag =
    Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag()
                                      : Address::invalid();

  // Check whether we need an EH cleanup.  This is only true if we've
  // generated a lazy EH cleanup block.
  llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
  assert(Scope.hasEHBranches() == (EHEntry != nullptr));
  bool RequiresEHCleanup = (EHEntry != nullptr);
  EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope();

  // Check the three conditions which might require a normal cleanup:

  // - whether there are branch fix-ups through this cleanup
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

  // - whether there are branch-throughs or branch-afters
  bool HasExistingBranches = Scope.hasBranches();

  // - whether there's a fallthrough
  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
  bool HasFallthrough =
      FallthroughSource != nullptr && (IsActive || HasExistingBranches);

  // Branch-through fall-throughs leave the insertion point set to the
  // end of the last cleanup, which points to the current scope.  The
  // rest of IR gen doesn't need to worry about this; it only happens
  // during the execution of PopCleanupBlocks().
  bool HasPrebranchedFallthrough =
    (FallthroughSource && FallthroughSource->getTerminator());

  // If this is a normal cleanup, then having a prebranched
  // fallthrough implies that the fallthrough source unconditionally
  // jumps here.
  assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
         (Scope.getNormalBlock() &&
          FallthroughSource->getTerminator()->getSuccessor(0)
            == Scope.getNormalBlock()));

  bool RequiresNormalCleanup = false;
  if (Scope.isNormalCleanup() &&
      (HasFixups || HasExistingBranches || HasFallthrough)) {
    RequiresNormalCleanup = true;
  }

  // If we have a prebranched fallthrough into an inactive normal
  // cleanup, rewrite it so that it leads to the appropriate place.
  if (Scope.isNormalCleanup() && HasPrebranchedFallthrough &&
      !RequiresNormalCleanup) {
    // FIXME: Come up with a program which would need forwarding prebranched
    // fallthrough and add tests. Otherwise delete this and assert against it.
    assert(!IsActive);
    llvm::BasicBlock *prebranchDest;

    // If the prebranch is semantically branching through the next
    // cleanup, just forward it to the next block, leaving the
    // insertion point in the prebranched block.
    if (FallthroughIsBranchThrough) {
      EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
      prebranchDest = CreateNormalEntry(*this, cast<EHCleanupScope>(enclosing));

    // Otherwise, we need to make a new block.  If the normal cleanup
    // isn't being used at all, we could actually reuse the normal
    // entry block, but this is simpler, and it avoids conflicts with
    // dead optimistic fixup branches.
    } else {
      prebranchDest = createBasicBlock("forwarded-prebranch");
      EmitBlock(prebranchDest);
    }

    llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
    assert(normalEntry && !normalEntry->use_empty());

    ForwardPrebranchedFallthrough(FallthroughSource,
                                  normalEntry, prebranchDest);
  }

  // If we don't need the cleanup at all, we're done.
  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup(); // safe because there are no fixups
    assert(EHStack.getNumBranchFixups() == 0 ||
           EHStack.hasNormalCleanups());
    if (NormalDeactivateOrigIP.isSet())
      Builder.restoreIP(NormalDeactivateOrigIP);
    return;
  }

  // Copy the cleanup emission data out.  This uses either a stack
  // array or malloc'd memory, depending on the size, which is
  // behavior that SmallVector would provide, if we could use it
  // here. Unfortunately, if you ask for a SmallVector<char>, the
  // alignment isn't sufficient.
  auto *CleanupSource = reinterpret_cast<char *>(Scope.getCleanupBuffer());
  alignas(EHScopeStack::ScopeStackAlignment) char
      CleanupBufferStack[8 * sizeof(void *)];
  std::unique_ptr<char[]> CleanupBufferHeap;
  size_t CleanupSize = Scope.getCleanupSize();
  EHScopeStack::Cleanup *Fn;

  if (CleanupSize <= sizeof(CleanupBufferStack)) {
    memcpy(CleanupBufferStack, CleanupSource, CleanupSize);
    Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferStack);
  } else {
    CleanupBufferHeap.reset(new char[CleanupSize]);
    memcpy(CleanupBufferHeap.get(), CleanupSource, CleanupSize);
    Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferHeap.get());
  }

  EHScopeStack::Cleanup::Flags cleanupFlags;
  if (Scope.isNormalCleanup())
    cleanupFlags.setIsNormalCleanupKind();
  if (Scope.isEHCleanup())
    cleanupFlags.setIsEHCleanupKind();

  // Under -EHa, invoke seh.scope.end() to mark scope end before dtor
  bool IsEHa = getLangOpts().EHAsynch && !Scope.isLifetimeMarker();
  const EHPersonality &Personality = EHPersonality::get(*this);
  if (!RequiresNormalCleanup) {
    // Mark the CPP scope end for a passed-by-value argument temporary,
    // which per the Windows ABI is "normally" cleaned up in the callee.
    if (IsEHa && getInvokeDest()) {
      // If we are deactivating a normal cleanup then we don't have a
      // fallthrough. Restore original IP to emit CPP scope ends in the correct
      // block.
      if (NormalDeactivateOrigIP.isSet())
        Builder.restoreIP(NormalDeactivateOrigIP);
      if (Personality.isMSVCXXPersonality() && Builder.GetInsertBlock())
        EmitSehCppScopeEnd();
      if (NormalDeactivateOrigIP.isSet())
        NormalDeactivateOrigIP = Builder.saveAndClearIP();
    }
    destroyOptimisticNormalEntry(*this, Scope);
    Scope.MarkEmitted();
    EHStack.popCleanup();
  } else {
    // If we have a fallthrough and no other need for the cleanup,
    // emit it directly.
    if (HasFallthrough && !HasPrebranchedFallthrough && !HasFixups &&
        !HasExistingBranches) {

      // mark SEH scope end for fall-through flow
      if (IsEHa && getInvokeDest()) {
        if (Personality.isMSVCXXPersonality())
          EmitSehCppScopeEnd();
        else
          EmitSehTryScopeEnd();
      }

      destroyOptimisticNormalEntry(*this, Scope);
      Scope.MarkEmitted();
      EHStack.popCleanup();

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

    // Otherwise, the best approach is to thread everything through
    // the cleanup block and then try to clean up after ourselves.
    } else {
      // Force the entry block to exist.
      llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);

      // I.  Set up the fallthrough edge in.

      CGBuilderTy::InsertPoint savedInactiveFallthroughIP;

      // If there's a fallthrough, we need to store the cleanup
      // destination index.  For fall-throughs this is always zero.
      if (HasFallthrough) {
        if (!HasPrebranchedFallthrough)
          Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());

      // Otherwise, save and clear the IP if we don't have fallthrough
      // because the cleanup is inactive.
      } else if (FallthroughSource) {
        assert(!IsActive && "source without fallthrough for active cleanup");
        savedInactiveFallthroughIP = Builder.saveAndClearIP();
      }

      // II.  Emit the entry block.  This implicitly branches to it if
      // we have fallthrough.  All the fixups and existing branches
      // should already be branched to it.
      EmitBlock(NormalEntry);

      // intercept normal cleanup to mark SEH scope end
      if (IsEHa && getInvokeDest()) {
        if (Personality.isMSVCXXPersonality())
          EmitSehCppScopeEnd();
        else
          EmitSehTryScopeEnd();
      }

      // III.  Figure out where we're going and build the cleanup
      // epilogue.

      bool HasEnclosingCleanups =
        (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());

      // Compute the branch-through dest if we need it:
      //   - if there are branch-throughs threaded through the scope
      //   - if fall-through is a branch-through
      //   - if there are fixups that will be optimistically forwarded
      //     to the enclosing cleanup
      llvm::BasicBlock *BranchThroughDest = nullptr;
      if (Scope.hasBranchThroughs() ||
          (FallthroughSource && FallthroughIsBranchThrough) ||
          (HasFixups && HasEnclosingCleanups)) {
        assert(HasEnclosingCleanups);
        EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
        BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
      }

      llvm::BasicBlock *FallthroughDest = nullptr;
      SmallVector<llvm::Instruction*, 2> InstsToAppend;

      // If there's exactly one branch-after and no other threads,
      // we can route it without a switch.
      // Skip this for SEH, since the exit switch is used to generate code
      // indicating abnormal termination. (In SEH, except for _leave and
      // fall-through at the end, all other exits from a _try
      // (return/goto/continue/break) are considered abnormal terminations,
      // with NormalCleanupDestSlot used to indicate them.)
      if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
          !currentFunctionUsesSEHTry() && Scope.getNumBranchAfters() == 1) {
        assert(!BranchThroughDest || !IsActive);

        // Clean up the possibly dead store to the cleanup dest slot.
        llvm::Instruction *NormalCleanupDestSlot =
            cast<llvm::Instruction>(getNormalCleanupDestSlot().getPointer());
        if (NormalCleanupDestSlot->hasOneUse()) {
          NormalCleanupDestSlot->user_back()->eraseFromParent();
          NormalCleanupDestSlot->eraseFromParent();
          NormalCleanupDest = RawAddress::invalid();
        }

        llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));

      // Build a switch-out if we need it:
      //   - if there are branch-afters threaded through the scope
      //   - if fall-through is a branch-after
      //   - if there are fixups that have nowhere left to go and
      //     so must be immediately resolved
      } else if (Scope.getNumBranchAfters() ||
                 (HasFallthrough && !FallthroughIsBranchThrough) ||
                 (HasFixups && !HasEnclosingCleanups)) {

        llvm::BasicBlock *Default =
          (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());

        // TODO: base this on the number of branch-afters and fixups
        const unsigned SwitchCapacity = 10;

        // pass the abnormal exit flag to Fn (SEH cleanup)
        cleanupFlags.setHasExitSwitch();

        llvm::LoadInst *Load = createLoadInstBefore(getNormalCleanupDestSlot(),
                                                    "cleanup.dest", *this);
        llvm::SwitchInst *Switch =
          llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

        InstsToAppend.push_back(Load);
        InstsToAppend.push_back(Switch);

        // Branch-after fallthrough.
        if (FallthroughSource && !FallthroughIsBranchThrough) {
          FallthroughDest = createBasicBlock("cleanup.cont");
          if (HasFallthrough)
            Switch->addCase(Builder.getInt32(0), FallthroughDest);
        }

        for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
          Switch->addCase(Scope.getBranchAfterIndex(I),
                          Scope.getBranchAfterBlock(I));
        }

        // If there aren't any enclosing cleanups, we can resolve all
        // the fixups now.
        if (HasFixups && !HasEnclosingCleanups)
          ResolveAllBranchFixups(*this, Switch, NormalEntry);
      } else {
        // We should always have a branch-through destination in this case.
        assert(BranchThroughDest);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
      }

      // IV.  Pop the cleanup and emit it.
      Scope.MarkEmitted();
      EHStack.popCleanup();
      assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

      // Append the prepared cleanup epilogue from above.
      llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
      for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
        InstsToAppend[I]->insertInto(NormalExit, NormalExit->end());

      // Optimistically hope that any fixups will continue falling through.
      for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
           I < E; ++I) {
        BranchFixup &Fixup = EHStack.getBranchFixup(I);
        if (!Fixup.Destination)
          continue;
        if (!Fixup.OptimisticBranchBlock) {
          createStoreInstBefore(Builder.getInt32(Fixup.DestinationIndex),
                                getNormalCleanupDestSlot(),
                                Fixup.InitialBranch->getIterator(), *this);
          Fixup.InitialBranch->setSuccessor(0, NormalEntry);
        }
        Fixup.OptimisticBranchBlock = NormalExit;
      }

      // V.  Set up the fallthrough edge out.

      // Case 1: a fallthrough source exists but doesn't branch to the
      // cleanup because the cleanup is inactive.
      if (!HasFallthrough && FallthroughSource) {
        // Prebranched fallthrough was forwarded earlier.
        // Non-prebranched fallthrough doesn't need to be forwarded.
        // Either way, all we need to do is restore the IP we cleared before.
        assert(!IsActive);
        Builder.restoreIP(savedInactiveFallthroughIP);

      // Case 2: a fallthrough source exists and should branch to the
      // cleanup, but we're not supposed to branch through to the next
      // cleanup.
      } else if (HasFallthrough && FallthroughDest) {
        assert(!FallthroughIsBranchThrough);
        EmitBlock(FallthroughDest);

      // Case 3: a fallthrough source exists and should branch to the
      // cleanup and then through to the next.
      } else if (HasFallthrough) {
        // Everything is already set up for this.

      // Case 4: no fallthrough source exists.
      } else {
        Builder.ClearInsertionPoint();
      }

      // VI.  Assorted cleaning.

      // Check whether we can merge NormalEntry into a single predecessor.
      // This might invalidate (non-IR) pointers to NormalEntry.
      llvm::BasicBlock *NewNormalEntry =
        SimplifyCleanupEntry(*this, NormalEntry);

      // If it did invalidate those pointers, and NormalEntry was the same
      // as NormalExit, go back and patch up the fixups.
      if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
        for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
               I < E; ++I)
          EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
    }
  }

  if (NormalDeactivateOrigIP.isSet())
    Builder.restoreIP(NormalDeactivateOrigIP);
  assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);

  // Emit the EH cleanup if required.
  if (RequiresEHCleanup) {
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

    EmitBlock(EHEntry);

    llvm::BasicBlock *NextAction = getEHDispatchBlock(EHParent);

    // Push a terminate scope or cleanupendpad scope around the potentially
    // throwing cleanups. For funclet EH personalities, the cleanupendpad models
    // program termination when cleanups throw.
    bool PushedTerminate = false;
    SaveAndRestore RestoreCurrentFuncletPad(CurrentFuncletPad);
    llvm::CleanupPadInst *CPI = nullptr;

    const EHPersonality &Personality = EHPersonality::get(*this);
    if (Personality.usesFuncletPads()) {
      llvm::Value *ParentPad = CurrentFuncletPad;
      if (!ParentPad)
        ParentPad = llvm::ConstantTokenNone::get(CGM.getLLVMContext());
      CurrentFuncletPad = CPI = Builder.CreateCleanupPad(ParentPad);
    }

    // Non-MSVC personalities need to terminate when an EH cleanup throws.
    if (!Personality.isMSVCPersonality()) {
      EHStack.pushTerminate();
      PushedTerminate = true;
    } else if (IsEHa && getInvokeDest()) {
      EmitSehCppScopeEnd();
    }

    // We only actually emit the cleanup code if the cleanup is either
    // active or was used before it was deactivated.
    if (EHActiveFlag.isValid() || IsActive) {
      cleanupFlags.setIsForEHCleanup();
      EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
    }

    if (CPI)
      Builder.CreateCleanupRet(CPI, NextAction);
    else
      Builder.CreateBr(NextAction);

    // Leave the terminate scope.
    if (PushedTerminate)
      EHStack.popTerminate();

    Builder.restoreIP(SavedIP);

    SimplifyCleanupEntry(*this, EHEntry);
  }
}

/// isObviouslyBranchWithoutCleanups - Return true if a branch to the
/// specified destination obviously has no cleanups to run.  'false' is always
/// a conservatively correct answer for this method.
bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator TopCleanup =
    EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
    return true;

  // Otherwise, we might need some cleanups.
  return false;
}

/// Terminate the current block by emitting a branch which might leave
/// the current cleanup-protected scope.  The target scope may not yet
/// be known, in which case this will require a fixup.
///
/// As a side-effect, this method clears the insertion point.
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator
    TopCleanup = EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
    Builder.ClearInsertionPoint();
    return;
  }

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope as a branch fixup.
  if (!Dest.getScopeDepth().isValid()) {
    BranchFixup &Fixup = EHStack.addBranchFixup();
    Fixup.Destination = Dest.getBlock();
    Fixup.DestinationIndex = Dest.getDestIndex();
    Fixup.InitialBranch = BI;
    Fixup.OptimisticBranchBlock = nullptr;

    Builder.ClearInsertionPoint();
    return;
  }

  // Otherwise, thread through all the normal cleanups in scope.

  // Store the index at the start.
  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
  createStoreInstBefore(Index, getNormalCleanupDestSlot(), BI->getIterator(),
                        *this);

  // Adjust BI to point to the first cleanup block.
  {
    EHCleanupScope &Scope =
      cast<EHCleanupScope>(*EHStack.find(TopCleanup));
    BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
  }

  // Add this destination to all the scopes involved.
  EHScopeStack::stable_iterator I = TopCleanup;
  EHScopeStack::stable_iterator E = Dest.getScopeDepth();
  if (E.strictlyEncloses(I)) {
    while (true) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
      assert(Scope.isNormalCleanup());
      I = Scope.getEnclosingNormalCleanup();

      // If this is the last cleanup we're propagating through, tell it
      // that there's a resolved jump moving through it.
      if (!E.strictlyEncloses(I)) {
        Scope.addBranchAfter(Index, Dest.getBlock());
        break;
      }

      // Otherwise, tell the scope that there's a jump propagating
      // through it.  If this isn't new information, all the rest of
      // the work has been done before.
      if (!Scope.addBranchThrough(Dest.getBlock()))
        break;
    }
  }

  Builder.ClearInsertionPoint();
}

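/// Check whether the given cleanup was ever used as an EH cleanup: true if
/// its scope needed an EH block for any reason, or if any EH scope it
/// encloses did.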
static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
                              EHScopeStack::stable_iterator cleanup) {
  // If we needed an EH block for any reason, that counts.
  if (EHStack.find(cleanup)->hasEHBranches())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         i = EHStack.getInnermostEHScope(); i != cleanup; ) {
    assert(cleanup.strictlyEncloses(i));

    EHScope &scope = *EHStack.find(i);
    if (scope.hasEHBranches())
      return true;

    i = scope.getEnclosingEHScope();
  }

  return false;
}

enum ForActivation_t {
  ForActivation,
  ForDeactivation
};

/// The given cleanup block is changing activation state.  Configure a
/// cleanup variable if necessary.
///
/// It would be good if we had some way of determining if there were
/// extra uses *after* the change-over point.
static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
                                        EHScopeStack::stable_iterator C,
                                        ForActivation_t kind,
                                        llvm::Instruction *dominatingIP) {
  EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));

  // We always need the flag if we're activating the cleanup in a
  // conditional context, because we have to assume that the current
  // location doesn't necessarily dominate the cleanup's code.
  bool isActivatedInConditional =
    (kind == ForActivation && CGF.isInConditionalBranch());

  bool needFlag = false;

  // Calculate whether the cleanup was used:

  //   - as a normal cleanup
  if (Scope.isNormalCleanup()) {
    Scope.setTestFlagInNormalCleanup();
    needFlag = true;
  }

  //  - as an EH cleanup
  if (Scope.isEHCleanup() &&
      (isActivatedInConditional || IsUsedAsEHCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInEHCleanup();
    needFlag = true;
  }

  // If it hasn't yet been used as either, we're done.
  if (!needFlag)
    return;

  Address var = Scope.getActiveFlag();
  if (!var.isValid()) {
    CodeGenFunction::AllocaTrackerRAII AllocaTracker(CGF);
    var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), CharUnits::One(),
                               "cleanup.isactive");
    Scope.setActiveFlag(var);
    Scope.AddAuxAllocas(AllocaTracker.Take());

    assert(dominatingIP && "no existing variable and no dominating IP!");

    // Initialize to true or false depending on whether it was
    // active up to this point.
    llvm::Constant *value = CGF.Builder.getInt1(kind == ForDeactivation);

    // If we're in a conditional block, ignore the dominating IP and
    // use the outermost conditional branch.
    if (CGF.isInConditionalBranch()) {
      CGF.setBeforeOutermostConditional(value, var, CGF);
    } else {
      createStoreInstBefore(value, var, dominatingIP->getIterator(), CGF);
    }
  }

  CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
}

/// Activate a cleanup that was created in an inactivated state.
void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
                                           llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "activating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(!Scope.isActive() && "double activation");

  SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);

  Scope.setActive(true);
}

/// Deactivate a cleanup that was created in an active state.
void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
                                             llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(Scope.isActive() && "double deactivation");

  // If it's the top of the stack, just pop it, but do so only if it belongs
  // to the current RunCleanupsScope.
  if (C == EHStack.stable_begin() &&
      CurrentCleanupScopeDepth.strictlyEncloses(C)) {
    PopCleanupBlock(/*FallthroughIsBranchThrough=*/false,
                    /*ForDeactivation=*/true);
    return;
  }

  // Otherwise, follow the general case.
  SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);

  Scope.setActive(false);
}

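/// Lazily create the i32 slot that records the index of the destination
/// that a branch threading through normal cleanups should resolve to.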
RawAddress CodeGenFunction::getNormalCleanupDestSlot() {
  if (!NormalCleanupDest.isValid())
    NormalCleanupDest =
      CreateDefaultAlignTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
  return NormalCleanupDest;
}

/// Emits all the code to cause the given temporary to be cleaned up.
void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
                                       QualType TempType,
                                       Address Ptr) {
  pushDestroy(NormalAndEHCleanup, Ptr, TempType, destroyCXXObject,
              /*useEHCleanup*/ true);
}

// Need to set "funclet" in the OperandBundle properly for the noThrow
// intrinsic (see CGCall.cpp).
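//
// EmitSehScope emits the given scope intrinsic as an invoke against the
// current invoke destination, so that unwinding out of the scope is modeled,
// and then continues emission in a fresh "invoke.cont" block.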
static void EmitSehScope(CodeGenFunction &CGF,
                         llvm::FunctionCallee &SehCppScope) {
  llvm::BasicBlock *InvokeDest = CGF.getInvokeDest();
  assert(CGF.Builder.GetInsertBlock() && InvokeDest);
  llvm::BasicBlock *Cont = CGF.createBasicBlock("invoke.cont");
  SmallVector<llvm::OperandBundleDef, 1> BundleList =
      CGF.getBundlesForFunclet(SehCppScope.getCallee());
  if (CGF.CurrentFuncletPad)
    BundleList.emplace_back("funclet", CGF.CurrentFuncletPad);
  CGF.Builder.CreateInvoke(SehCppScope, Cont, InvokeDest, {}, BundleList);
  CGF.EmitBlock(Cont);
}

// Invoke an llvm.seh.scope.begin at the beginning of a CPP scope for -EHa.
void CodeGenFunction::EmitSehCppScopeBegin() {
  assert(getLangOpts().EHAsynch);
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
  llvm::FunctionCallee SehCppScope =
      CGM.CreateRuntimeFunction(FTy, "llvm.seh.scope.begin");
  EmitSehScope(*this, SehCppScope);
}

// Invoke an llvm.seh.scope.end at the end of a CPP scope for -EHa.
// llvm.seh.scope.end is emitted before popCleanup, so it's "invoked".
void CodeGenFunction::EmitSehCppScopeEnd() {
  assert(getLangOpts().EHAsynch);
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
  llvm::FunctionCallee SehCppScope =
      CGM.CreateRuntimeFunction(FTy, "llvm.seh.scope.end");
  EmitSehScope(*this, SehCppScope);
}

// Invoke an llvm.seh.try.begin at the beginning of an SEH scope for -EHa.
void CodeGenFunction::EmitSehTryScopeBegin() {
  assert(getLangOpts().EHAsynch);
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
  llvm::FunctionCallee SehCppScope =
      CGM.CreateRuntimeFunction(FTy, "llvm.seh.try.begin");
  EmitSehScope(*this, SehCppScope);
}

// Invoke an llvm.seh.try.end at the end of an SEH scope for -EHa.
void CodeGenFunction::EmitSehTryScopeEnd() {
  assert(getLangOpts().EHAsynch);
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
  llvm::FunctionCallee SehCppScope =
      CGM.CreateRuntimeFunction(FTy, "llvm.seh.try.end");
  EmitSehScope(*this, SehCppScope);
}