xref: /llvm-project/llvm/lib/Transforms/Coroutines/SpillUtils.cpp (revision 29441e4f5fa5f5c7709f7cf180815ba97f611297)
//===- SpillUtils.cpp - Utilities for checking for spills ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Coroutines/SpillUtils.h"
#include "CoroInternal.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/PtrUseVisitor.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"

namespace llvm {

namespace coro {

namespace {

typedef SmallPtrSet<BasicBlock *, 8> VisitedBlocksSet;

// Check for structural coroutine intrinsics that should not be spilled into
// the coroutine frame.
static bool isCoroutineStructureIntrinsic(Instruction &I) {
  return isa<CoroIdInst>(&I) || isa<CoroSaveInst>(&I) ||
         isa<CoroSuspendInst>(&I);
}

/// Does control flow starting at the given block ever reach a suspend
/// instruction before reaching a block in VisitedOrFreeBBs?
static bool isSuspendReachableFrom(BasicBlock *From,
                                   VisitedBlocksSet &VisitedOrFreeBBs) {
  // Eagerly try to add this block to the visited set. If it's already
  // there, stop recursing; this path doesn't reach a suspend before
  // either looping or reaching a freeing block.
  if (!VisitedOrFreeBBs.insert(From).second)
    return false;

  // We assume that we'll already have split suspends into their own blocks.
  if (coro::isSuspendBlock(From))
    return true;

  // Recurse on the successors.
  for (auto *Succ : successors(From)) {
    if (isSuspendReachableFrom(Succ, VisitedOrFreeBBs))
      return true;
  }

  return false;
}

/// Is the given alloca "local", i.e. bounded in lifetime to not cross a
/// suspend point?
static bool isLocalAlloca(CoroAllocaAllocInst *AI) {
  // Seed the visited set with all the basic blocks containing a free
  // so that we won't pass them up.
  VisitedBlocksSet VisitedOrFreeBBs;
  for (auto *User : AI->users()) {
    if (auto FI = dyn_cast<CoroAllocaFreeInst>(User))
      VisitedOrFreeBBs.insert(FI->getParent());
  }

  return !isSuspendReachableFrom(AI->getParent(), VisitedOrFreeBBs);
}
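
// A hedged, hand-written sketch of the two cases (block granularity matters,
// since the walk above assumes suspends have been split into their own
// blocks):
//
//   ; local: every path from the alloc block reaches a block containing
//   ; coro.alloca.free before reaching a suspend block.
//   %a = call token @llvm.coro.alloca.alloc(i64 64, i32 8)
//   %p = call ptr @llvm.coro.alloca.get(token %a)
//   ...
//   call void @llvm.coro.alloca.free(token %a)     ; reached first -> local
//
//   ; not local: a suspend block is reachable before any free block, so
//   ; lowerNonLocalAlloca() below must turn this into a dynamic allocation.
//   %a = call token @llvm.coro.alloca.alloc(i64 64, i32 8)
//   ...
//   %s = call i8 @llvm.coro.suspend(token none, i1 false)
//   ...
//   call void @llvm.coro.alloca.free(token %a)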

/// Turn the given coro.alloca.alloc call into a dynamic allocation.
/// This happens during the all-instructions iteration, so it must not
/// delete the call.
static Instruction *
lowerNonLocalAlloca(CoroAllocaAllocInst *AI, const coro::Shape &Shape,
                    SmallVectorImpl<Instruction *> &DeadInsts) {
  IRBuilder<> Builder(AI);
  auto Alloc = Shape.emitAlloc(Builder, AI->getSize(), nullptr);

  for (User *U : AI->users()) {
    if (isa<CoroAllocaGetInst>(U)) {
      U->replaceAllUsesWith(Alloc);
    } else {
      auto FI = cast<CoroAllocaFreeInst>(U);
      Builder.SetInsertPoint(FI);
      Shape.emitDealloc(Builder, Alloc, nullptr);
    }
    DeadInsts.push_back(cast<Instruction>(U));
  }

  // Push this on last so that it gets deleted after all the others.
  DeadInsts.push_back(AI);

  // Return the new allocation value so that we can check for needed spills.
  return cast<Instruction>(Alloc);
}
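
// Roughly, and only as an illustrative sketch (the actual allocation and
// deallocation calls are whatever Shape.emitAlloc/emitDealloc produce for the
// coroutine's ABI; @allocate and @deallocate below are placeholders), the
// rewrite turns
//
//   %a = call token @llvm.coro.alloca.alloc(i64 %size, i32 8)
//   %p = call ptr @llvm.coro.alloca.get(token %a)
//   ...
//   call void @llvm.coro.alloca.free(token %a)
//
// into
//
//   %mem = call ptr @allocate(i64 %size)   ; emitted by Shape.emitAlloc
//   ...                                    ; uses of %p now use %mem
//   call void @deallocate(ptr %mem)        ; emitted by Shape.emitDealloc
//
// with the original coro.alloca.* instructions queued up in DeadInsts.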

// We need to make room to insert a spill after the initial PHIs, but before
// the catchswitch instruction. Placing the spill before the catchswitch would
// violate the requirement that a catchswitch, like all other EH pads, must be
// the first non-PHI instruction in its block.
//
// Split the catchswitch off into a separate block and insert in its place:
//
//   cleanuppad <InsertPt> cleanupret
//
// The cleanupret instruction will act as an insert point for the spill.
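//
// A hedged before/after sketch (block and value names are illustrative only):
//
//   before:                            after:
//     bb:                                bb:
//       %p = phi ...                       %p = phi ...
//       %cs = catchswitch within ...       %pad = cleanuppad within ...
//                                          ; <InsertPt> for the spill
//                                          cleanupret from %pad
//                                              unwind label %bb.split
//                                        bb.split:
//                                          %cs = catchswitch within ...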
static Instruction *splitBeforeCatchSwitch(CatchSwitchInst *CatchSwitch) {
  BasicBlock *CurrentBlock = CatchSwitch->getParent();
  BasicBlock *NewBlock = CurrentBlock->splitBasicBlock(CatchSwitch);
  CurrentBlock->getTerminator()->eraseFromParent();

  auto *CleanupPad =
      CleanupPadInst::Create(CatchSwitch->getParentPad(), {}, "", CurrentBlock);
  auto *CleanupRet =
      CleanupReturnInst::Create(CleanupPad, NewBlock, CurrentBlock);
  return CleanupRet;
}

// We use a pointer use visitor to track how an alloca is being used.
// The goal is to be able to answer the following three questions:
//   1. Should this alloca be allocated on the frame instead?
//   2. Could the content of the alloca be modified prior to CoroBegin, which
//      would require copying the data from the alloca to the frame after
//      CoroBegin?
//   3. Are there any aliases created for this alloca prior to CoroBegin, but
//      used after CoroBegin? If so, we will need to recreate the alias after
//      CoroBegin based off the frame.
//
// To answer question 1, we track two things:
//   A. The list of all BasicBlocks that use this alloca or any of its aliases.
//   In the end, we check whether there exist any two basic blocks in the list
//   such that a path between them crosses a suspension point. If so, this
//   alloca must be put on the frame.
//   B. Whether the alloca or any alias of the alloca escapes at some point,
//   either because the address is stored somewhere or because the address is
//   used in a function call that might capture it. If it ever escapes, this
//   alloca must conservatively be put on the frame.
//
// To answer question 2, we track writes through the variable
// MayWriteBeforeCoroBegin. Whenever a potential write happens, either through
// a store instruction, a function call or any of the memory intrinsics, we
// check whether the instruction happens prior to CoroBegin.
//
// To answer question 3, we track the offsets of all aliases created for the
// alloca prior to CoroBegin but used after CoroBegin. std::optional is used to
// represent the case when the offset is unknown (e.g. when you have a PHINode
// that takes in different offset values). We cannot handle unknown offsets and
// will report a fatal error. This is the main remaining limitation; an ideal
// solution would likely require a significant redesign.
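//
// As a concrete, hand-written illustration of question 3 (names are made up):
//
//   %x = alloca [16 x i8]
//   %p = getelementptr inbounds [16 x i8], ptr %x, i64 0, i64 8 ; alias, off 8
//   %hdl = call ptr @llvm.coro.begin(token %id, ptr %mem)
//   ...
//   store i8 1, ptr %p                        ; alias used after CoroBegin
//
// If %x ends up on the frame, %p must be recomputed from the frame address at
// offset 8 after CoroBegin; AliasOffetMap records exactly that offset.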

namespace {
struct AllocaUseVisitor : PtrUseVisitor<AllocaUseVisitor> {
  using Base = PtrUseVisitor<AllocaUseVisitor>;
  AllocaUseVisitor(const DataLayout &DL, const DominatorTree &DT,
                   const coro::Shape &CoroShape,
                   const SuspendCrossingInfo &Checker,
                   bool ShouldUseLifetimeStartInfo)
      : PtrUseVisitor(DL), DT(DT), CoroShape(CoroShape), Checker(Checker),
        ShouldUseLifetimeStartInfo(ShouldUseLifetimeStartInfo) {
    for (AnyCoroSuspendInst *SuspendInst : CoroShape.CoroSuspends)
      CoroSuspendBBs.insert(SuspendInst->getParent());
  }

  void visit(Instruction &I) {
    Users.insert(&I);
    Base::visit(I);
    // If the pointer is escaped prior to CoroBegin, we have to assume it would
    // be written into before CoroBegin as well.
    if (PI.isEscaped() &&
        !DT.dominates(CoroShape.CoroBegin, PI.getEscapingInst())) {
      MayWriteBeforeCoroBegin = true;
    }
  }
  // We need to provide this overload as PtrUseVisitor uses a pointer based
  // visiting function.
  void visit(Instruction *I) { return visit(*I); }

  void visitPHINode(PHINode &I) {
    enqueueUsers(I);
    handleAlias(I);
  }

  void visitSelectInst(SelectInst &I) {
    enqueueUsers(I);
    handleAlias(I);
  }

  void visitStoreInst(StoreInst &SI) {
    // Regardless of whether the alias of the alloca is the value operand or
    // the pointer operand, we have to assume the alloca has been written to.
    handleMayWrite(SI);

    if (SI.getValueOperand() != U->get())
      return;

    // We are storing the pointer into a memory location, potentially escaping.
    // As an optimization, we try to detect simple cases where it doesn't
    // actually escape, for example:
    //   %ptr = alloca ..
    //   %addr = alloca ..
    //   store %ptr, %addr
    //   %x = load %addr
    //   ..
    // If %addr is only used by loading from it, we can simply treat %x as
    // another alias of %ptr, and not consider %ptr as escaped.
    auto IsSimpleStoreThenLoad = [&]() {
      auto *AI = dyn_cast<AllocaInst>(SI.getPointerOperand());
      // If the memory location we are storing to is not an alloca, it
      // could be an alias of some other memory locations, which is difficult
      // to analyze.
      if (!AI)
        return false;
      // StoreAliases contains aliases of the memory location stored into.
      SmallVector<Instruction *, 4> StoreAliases = {AI};
      while (!StoreAliases.empty()) {
        Instruction *I = StoreAliases.pop_back_val();
        for (User *U : I->users()) {
          // If we are loading from the memory location, we are creating an
          // alias of the original pointer.
          if (auto *LI = dyn_cast<LoadInst>(U)) {
            enqueueUsers(*LI);
            handleAlias(*LI);
            continue;
          }
          // If we are overwriting the memory location, the pointer certainly
          // won't escape.
          if (auto *S = dyn_cast<StoreInst>(U))
            if (S->getPointerOperand() == I)
              continue;
          if (auto *II = dyn_cast<IntrinsicInst>(U))
            if (II->isLifetimeStartOrEnd())
              continue;
          // BitCastInst creates aliases of the memory location being stored
          // into.
          if (auto *BI = dyn_cast<BitCastInst>(U)) {
            StoreAliases.push_back(BI);
            continue;
          }
          return false;
        }
      }

      return true;
    };

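    // If the simple pattern is not matched (e.g., hypothetically,
    //   store ptr %ptr, ptr %unknown
    // where %unknown is not an alloca we can reason about locally), we
    // conservatively record the store as an escape of the pointer.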
    if (!IsSimpleStoreThenLoad())
      PI.setEscaped(&SI);
  }

  // All mem intrinsics modify the data.
  void visitMemIntrinsic(MemIntrinsic &MI) { handleMayWrite(MI); }

  void visitBitCastInst(BitCastInst &BC) {
    Base::visitBitCastInst(BC);
    handleAlias(BC);
  }

  void visitAddrSpaceCastInst(AddrSpaceCastInst &ASC) {
    Base::visitAddrSpaceCastInst(ASC);
    handleAlias(ASC);
  }

  void visitGetElementPtrInst(GetElementPtrInst &GEPI) {
    // The base visitor will adjust Offset accordingly.
    Base::visitGetElementPtrInst(GEPI);
    handleAlias(GEPI);
  }

  void visitIntrinsicInst(IntrinsicInst &II) {
    // When the lifetime markers refer to a subrange of the original alloca,
    // ignore them to avoid misleading the analysis.
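    // For instance (a hypothetical sketch), a marker that only covers part of
    // the alloca through an aliasing pointer at a non-zero offset:
    //   %p = getelementptr i8, ptr %x, i64 8
    //   call void @llvm.lifetime.start.p0(i64 4, ptr %p)
    // reaches this visitor with Offset == 8 and is therefore not recorded in
    // LifetimeStarts below.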
    if (!IsOffsetKnown || !Offset.isZero())
      return Base::visitIntrinsicInst(II);
    switch (II.getIntrinsicID()) {
    default:
      return Base::visitIntrinsicInst(II);
    case Intrinsic::lifetime_start:
      LifetimeStarts.insert(&II);
      LifetimeStartBBs.push_back(II.getParent());
      break;
    case Intrinsic::lifetime_end:
      LifetimeEndBBs.insert(II.getParent());
      break;
    }
  }

  void visitCallBase(CallBase &CB) {
    for (unsigned Op = 0, OpCount = CB.arg_size(); Op < OpCount; ++Op)
      if (U->get() == CB.getArgOperand(Op) && !CB.doesNotCapture(Op))
        PI.setEscaped(&CB);
    handleMayWrite(CB);
  }

  bool getShouldLiveOnFrame() const {
    if (!ShouldLiveOnFrame)
      ShouldLiveOnFrame = computeShouldLiveOnFrame();
    return *ShouldLiveOnFrame;
  }

  bool getMayWriteBeforeCoroBegin() const { return MayWriteBeforeCoroBegin; }

  DenseMap<Instruction *, std::optional<APInt>> getAliasesCopy() const {
    assert(getShouldLiveOnFrame() && "This method should only be called if the "
                                     "alloca needs to live on the frame.");
    for (const auto &P : AliasOffetMap)
      if (!P.second)
        report_fatal_error("Unable to handle an alias with unknown offset "
                           "created before CoroBegin.");
    return AliasOffetMap;
  }

private:
  const DominatorTree &DT;
  const coro::Shape &CoroShape;
  const SuspendCrossingInfo &Checker;
  // All aliases of the original AllocaInst that are created before CoroBegin
  // and used after CoroBegin. Each entry contains the instruction and its
  // offset into the original alloca. They need to be recreated after
  // CoroBegin, based off the frame.
  DenseMap<Instruction *, std::optional<APInt>> AliasOffetMap{};
  SmallPtrSet<Instruction *, 4> Users{};
  SmallPtrSet<IntrinsicInst *, 2> LifetimeStarts{};
  SmallVector<BasicBlock *> LifetimeStartBBs{};
  SmallPtrSet<BasicBlock *, 2> LifetimeEndBBs{};
  SmallPtrSet<const BasicBlock *, 2> CoroSuspendBBs{};
  bool MayWriteBeforeCoroBegin{false};
  bool ShouldUseLifetimeStartInfo{true};

  mutable std::optional<bool> ShouldLiveOnFrame{};

  bool computeShouldLiveOnFrame() const {
    // If lifetime information is available, we check it first since it's more
    // precise. We look at every pair of a lifetime.start intrinsic and a basic
    // block that uses the pointer to see if they cross suspension points. The
    // uses cover both direct and indirect uses.
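    // A minimal, hand-written illustration of the reachability check below:
    //
    //   call void @llvm.lifetime.start.p0(i64 8, ptr %x)
    //   %s = call i8 @llvm.coro.suspend(token none, i1 false)
    //   call void @llvm.lifetime.end.p0(i64 8, ptr %x)
    //
    // A suspend block is reachable from the lifetime.start block without
    // passing through a lifetime.end block, so %x must live on the frame.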
    if (ShouldUseLifetimeStartInfo && !LifetimeStarts.empty()) {
      // If there is no explicit lifetime.end, then assume the address can
      // cross suspension points.
      if (LifetimeEndBBs.empty())
        return true;

      // If there is a path from a lifetime.start to a suspend without a
      // corresponding lifetime.end, then the alloca's lifetime persists
      // beyond that suspension point and the alloca must go on the frame.
      llvm::SmallVector<BasicBlock *> Worklist(LifetimeStartBBs);
      if (isManyPotentiallyReachableFromMany(Worklist, CoroSuspendBBs,
                                             &LifetimeEndBBs, &DT))
        return true;

      // Addresses are guaranteed to be identical after every lifetime.start so
      // we cannot use the local stack if the address escaped and there is a
      // suspend point between lifetime markers. This should also cover the
      // case of a single lifetime.start intrinsic in a loop with suspend point.
      if (PI.isEscaped()) {
        for (auto *A : LifetimeStarts) {
          for (auto *B : LifetimeStarts) {
            if (Checker.hasPathOrLoopCrossingSuspendPoint(A->getParent(),
                                                          B->getParent()))
              return true;
          }
        }
      }
      return false;
    }
    // FIXME: Ideally the isEscaped check should come at the beginning.
    // However there are a few loose ends that need to be fixed first before
    // we can do that. We need to make sure we are not over-conservative, so
    // that the data accessed in-between await_suspend and symmetric transfer
    // is always put on the stack, and also data accessed after coro.end is
    // always put on the stack (esp the return object). To fix that, we need
    // to:
    //  1) Potentially treat sret as nocapture in calls
    //  2) Special handle the return object and put it on the stack
    //  3) Utilize lifetime.end intrinsic
    if (PI.isEscaped())
      return true;

    for (auto *U1 : Users)
      for (auto *U2 : Users)
        if (Checker.isDefinitionAcrossSuspend(*U1, U2))
          return true;

    return false;
  }

  void handleMayWrite(const Instruction &I) {
    if (!DT.dominates(CoroShape.CoroBegin, &I))
      MayWriteBeforeCoroBegin = true;
  }

  bool usedAfterCoroBegin(Instruction &I) {
    for (auto &U : I.uses())
      if (DT.dominates(CoroShape.CoroBegin, U))
        return true;
    return false;
  }

  void handleAlias(Instruction &I) {
    // We track all aliases created prior to CoroBegin but used after it.
    // These aliases may need to be recreated after CoroBegin if the alloca
    // needs to live on the frame.
    if (DT.dominates(CoroShape.CoroBegin, &I) || !usedAfterCoroBegin(I))
      return;

    if (!IsOffsetKnown) {
      AliasOffetMap[&I].reset();
    } else {
      auto [Itr, Inserted] = AliasOffetMap.try_emplace(&I, Offset);
      if (!Inserted && Itr->second && *Itr->second != Offset) {
        // If we have seen two different possible values for this alias, we set
        // it to empty.
        Itr->second.reset();
      }
    }
  }
};
} // namespace

static void collectFrameAlloca(AllocaInst *AI, const coro::Shape &Shape,
                               const SuspendCrossingInfo &Checker,
                               SmallVectorImpl<AllocaInfo> &Allocas,
                               const DominatorTree &DT) {
  if (Shape.CoroSuspends.empty())
    return;

  // The PromiseAlloca will be specially handled since it needs to be in a
  // fixed position in the frame.
  if (AI == Shape.SwitchLowering.PromiseAlloca)
    return;

  // The __coro_gro alloca should outlive the promise, so make sure we keep it
  // outside the frame.
  if (AI->hasMetadata(LLVMContext::MD_coro_outside_frame))
    return;

  // The code that uses the lifetime.start intrinsic does not work for
  // functions with loops that have no exit. Disable it for ABIs that we know
  // generate such code.
  bool ShouldUseLifetimeStartInfo =
      (Shape.ABI != coro::ABI::Async && Shape.ABI != coro::ABI::Retcon &&
       Shape.ABI != coro::ABI::RetconOnce);
  AllocaUseVisitor Visitor{AI->getDataLayout(), DT, Shape, Checker,
                           ShouldUseLifetimeStartInfo};
  Visitor.visitPtr(*AI);
  if (!Visitor.getShouldLiveOnFrame())
    return;
  Allocas.emplace_back(AI, Visitor.getAliasesCopy(),
                       Visitor.getMayWriteBeforeCoroBegin());
}

} // namespace

void collectSpillsFromArgs(SpillInfo &Spills, Function &F,
                           const SuspendCrossingInfo &Checker) {
  // Collect the spills for arguments and other not-materializable values.
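  // For instance (a hypothetical sketch), given a coroutine argument %n that
  // is used again after a suspend point:
  //
  //   define void @f(i32 %n) presplitcoroutine {
  //     ...
  //     %s = call i8 @llvm.coro.suspend(token none, i1 false)
  //     ...
  //     %d = add i32 %n, 1       ; use of %n on the resume path
  //   }
  //
  // isDefinitionAcrossSuspend(%n, %d) holds, so %d is recorded in Spills[%n],
  // and the argument will later be copied into the frame right after
  // coro.begin (see getSpillInsertionPt).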
  for (Argument &A : F.args())
    for (User *U : A.users())
      if (Checker.isDefinitionAcrossSuspend(A, U))
        Spills[&A].push_back(cast<Instruction>(U));
}

void collectSpillsAndAllocasFromInsts(
    SpillInfo &Spills, SmallVector<AllocaInfo, 8> &Allocas,
    SmallVector<Instruction *, 4> &DeadInstructions,
    SmallVector<CoroAllocaAllocInst *, 4> &LocalAllocas, Function &F,
    const SuspendCrossingInfo &Checker, const DominatorTree &DT,
    const coro::Shape &Shape) {

  for (Instruction &I : instructions(F)) {
    // Values returned from coroutine structure intrinsics should not be part
    // of the Coroutine Frame.
    if (isCoroutineStructureIntrinsic(I) || &I == Shape.CoroBegin)
      continue;

    // Handle alloca.alloc specially here.
    if (auto AI = dyn_cast<CoroAllocaAllocInst>(&I)) {
      // Check whether the alloca's lifetime is bounded by suspend points.
      if (isLocalAlloca(AI)) {
        LocalAllocas.push_back(AI);
        continue;
      }

      // If not, do a quick rewrite of the alloca and then add spills of
      // the rewritten value. The rewrite doesn't invalidate anything in
      // Spills because the other alloca intrinsics have no other operands
      // besides AI, and it doesn't invalidate the iteration because we delay
      // erasing AI.
      auto Alloc = lowerNonLocalAlloca(AI, Shape, DeadInstructions);

      for (User *U : Alloc->users()) {
        if (Checker.isDefinitionAcrossSuspend(*Alloc, U))
          Spills[Alloc].push_back(cast<Instruction>(U));
      }
      continue;
    }

    // Ignore alloca.get; we process this as part of coro.alloca.alloc.
    if (isa<CoroAllocaGetInst>(I))
      continue;

    if (auto *AI = dyn_cast<AllocaInst>(&I)) {
      collectFrameAlloca(AI, Shape, Checker, Allocas, DT);
      continue;
    }

    for (User *U : I.users())
      if (Checker.isDefinitionAcrossSuspend(I, U)) {
        // We cannot spill a token.
        if (I.getType()->isTokenTy())
          report_fatal_error(
              "token definition is separated from the use by a suspend point");
        Spills[&I].push_back(cast<Instruction>(U));
      }
  }
}

void collectSpillsFromDbgInfo(SpillInfo &Spills, Function &F,
                              const SuspendCrossingInfo &Checker) {
  // We don't want the layout of the coroutine frame to be affected by debug
  // information, so we only choose to salvage DbgValueInsts whose value is
  // already in the frame. The dbg.values for allocas are handled specially.
  for (auto &Iter : Spills) {
    auto *V = Iter.first;
    SmallVector<DbgValueInst *, 16> DVIs;
    SmallVector<DbgVariableRecord *, 16> DVRs;
    findDbgValues(DVIs, V, &DVRs);
    for (DbgValueInst *DVI : DVIs)
      if (Checker.isDefinitionAcrossSuspend(*V, DVI))
        Spills[V].push_back(DVI);
    // Add the instructions which carry debug info that is in the frame.
    for (DbgVariableRecord *DVR : DVRs)
      if (Checker.isDefinitionAcrossSuspend(*V, DVR->Marker->MarkedInstr))
        Spills[V].push_back(DVR->Marker->MarkedInstr);
  }
}

/// Async and Retcon{Once} conventions assume that all spill uses can be sunk
/// after the coro.begin intrinsic.
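/// As an illustrative (hypothetical) example: if a spilled alloca has a GEP
/// user that was emitted before coro.begin in the same block,
///
///   %x = alloca i64
///   %p = getelementptr i64, ptr %x, i64 0
///   %hdl = call ptr @llvm.coro.begin(token %id, ptr %mem)
///
/// that GEP (and, transitively, its own users) is moved to just after
/// coro.begin, in dominance order, so that later frame rewriting only ever
/// sees uses that follow the frame pointer.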
void sinkSpillUsesAfterCoroBegin(const DominatorTree &Dom,
                                 CoroBeginInst *CoroBegin,
                                 coro::SpillInfo &Spills,
                                 SmallVectorImpl<coro::AllocaInfo> &Allocas) {
  SmallSetVector<Instruction *, 32> ToMove;
  SmallVector<Instruction *, 32> Worklist;

  // Collect all users that precede coro.begin.
  auto collectUsers = [&](Value *Def) {
    for (User *U : Def->users()) {
      auto Inst = cast<Instruction>(U);
      if (Inst->getParent() != CoroBegin->getParent() ||
          Dom.dominates(CoroBegin, Inst))
        continue;
      if (ToMove.insert(Inst))
        Worklist.push_back(Inst);
    }
  };
  std::for_each(Spills.begin(), Spills.end(),
                [&](auto &I) { collectUsers(I.first); });
  std::for_each(Allocas.begin(), Allocas.end(),
                [&](auto &I) { collectUsers(I.Alloca); });

  // Recursively collect users before coro.begin.
  while (!Worklist.empty()) {
    auto *Def = Worklist.pop_back_val();
    for (User *U : Def->users()) {
      auto Inst = cast<Instruction>(U);
      if (Dom.dominates(CoroBegin, Inst))
        continue;
      if (ToMove.insert(Inst))
        Worklist.push_back(Inst);
    }
  }

  // Sort by dominance.
  SmallVector<Instruction *, 64> InsertionList(ToMove.begin(), ToMove.end());
  llvm::sort(InsertionList, [&Dom](Instruction *A, Instruction *B) -> bool {
    // If a dominates b it should precede (<) b.
    return Dom.dominates(A, B);
  });

  Instruction *InsertPt = CoroBegin->getNextNode();
  for (Instruction *Inst : InsertionList)
    Inst->moveBefore(InsertPt->getIterator());
}

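/// Pick the position at which the spill (the store into the coroutine frame)
/// for \p Def should be inserted. In outline (the invoke IR below is only a
/// hypothetical example): arguments and values not dominated by coro.begin
/// are spilled right after the frame pointer is established; the result of an
/// invoke such as "%r = invoke i32 @g() to label %ok unwind label %lpad" is
/// spilled in a new block on the split normal edge; the result of a suspend
/// is spilled at the start of its single successor block; a PHI is spilled
/// after its block's PHI/EH-pad prologue, splitting before a catchswitch if
/// necessary; everything else is spilled immediately after its definition.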
BasicBlock::iterator getSpillInsertionPt(const coro::Shape &Shape, Value *Def,
                                         const DominatorTree &DT) {
  BasicBlock::iterator InsertPt;
  if (auto *Arg = dyn_cast<Argument>(Def)) {
    // For arguments, we will place the store instruction right after
    // the coroutine frame pointer instruction, i.e. coro.begin.
    InsertPt = Shape.getInsertPtAfterFramePtr();

    // If we're spilling an Argument, make sure we clear 'captures'
    // from the coroutine function.
    Arg->getParent()->removeParamAttr(Arg->getArgNo(), Attribute::Captures);
  } else if (auto *CSI = dyn_cast<AnyCoroSuspendInst>(Def)) {
    // Don't spill immediately after a suspend; splitting assumes
    // that the suspend will be followed by a branch.
    InsertPt = CSI->getParent()->getSingleSuccessor()->getFirstNonPHIIt();
  } else {
    auto *I = cast<Instruction>(Def);
    if (!DT.dominates(Shape.CoroBegin, I)) {
      // If it is not dominated by CoroBegin, then the spill should be
      // inserted immediately after the coroutine frame pointer is computed.
      InsertPt = Shape.getInsertPtAfterFramePtr();
    } else if (auto *II = dyn_cast<InvokeInst>(I)) {
      // If we are spilling the result of the invoke instruction, split
      // the normal edge and insert the spill in the new block.
      auto *NewBB = SplitEdge(II->getParent(), II->getNormalDest());
      InsertPt = NewBB->getTerminator()->getIterator();
    } else if (isa<PHINode>(I)) {
      // Skip the PHI nodes and EH pad instructions.
      BasicBlock *DefBlock = I->getParent();
      if (auto *CSI = dyn_cast<CatchSwitchInst>(DefBlock->getTerminator()))
        InsertPt = splitBeforeCatchSwitch(CSI)->getIterator();
      else
        InsertPt = DefBlock->getFirstInsertionPt();
    } else {
      assert(!I->isTerminator() && "unexpected terminator");
      // For all other values, the spill is placed immediately after
      // the definition.
      InsertPt = I->getNextNode()->getIterator();
    }
  }

  return InsertPt;
}

} // End namespace coro.

} // End namespace llvm.