//===- CoroElide.cpp - Coroutine Frame Allocation Elision Pass ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Coroutines/CoroElide.h"
#include "CoroInternal.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/ErrorHandling.h"

using namespace llvm;

#define DEBUG_TYPE "coro-elide"

namespace {
// Created on demand if the coro-elide pass has work to do.
struct Lowerer : coro::LowererBase {
  SmallVector<CoroIdInst *, 4> CoroIds;
  SmallVector<CoroBeginInst *, 1> CoroBegins;
  SmallVector<CoroAllocInst *, 1> CoroAllocs;
  SmallVector<CoroSubFnInst *, 4> ResumeAddr;
  DenseMap<CoroBeginInst *, SmallVector<CoroSubFnInst *, 4>> DestroyAddr;
  SmallVector<CoroFreeInst *, 1> CoroFrees;
  SmallPtrSet<const SwitchInst *, 4> CoroSuspendSwitches;

  Lowerer(Module &M) : LowererBase(M) {}

  void elideHeapAllocations(Function *F, uint64_t FrameSize, Align FrameAlign,
                            AAResults &AA);
  bool shouldElide(Function *F, DominatorTree &DT) const;
  void collectPostSplitCoroIds(Function *F);
  bool processCoroId(CoroIdInst *, AAResults &AA, DominatorTree &DT);
  bool hasEscapePath(const CoroBeginInst *,
                     const SmallPtrSetImpl<BasicBlock *> &) const;
};
} // end anonymous namespace

// Go through the list of coro.subfn.addr intrinsics and replace them with the
// provided constant.
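//
// For example (an illustrative sketch; names are hypothetical):
//   %addr = call i8* @llvm.coro.subfn.addr(i8* %hdl, i8 0) ; ResumeIndex
//   call void %addr(i8* %hdl)
// Replacing %addr with the constant @f.resume and recursively simplifying
// turns the indirect call into a direct call:
//   call void @f.resume(i8* %hdl)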
static void replaceWithConstant(Constant *Value,
                                SmallVectorImpl<CoroSubFnInst *> &Users) {
  if (Users.empty())
    return;

  // See if we need to bitcast the constant to match the type of the intrinsic
  // being replaced. Note: All coro.subfn.addr intrinsics return the same type,
  // so we only need to examine the type of the first one in the list.
  Type *IntrTy = Users.front()->getType();
  Type *ValueTy = Value->getType();
  if (ValueTy != IntrTy) {
    // May need to tweak the function type to match the type expected at the
    // use site.
    assert(ValueTy->isPointerTy() && IntrTy->isPointerTy());
    Value = ConstantExpr::getBitCast(Value, IntrTy);
  }

  // Now the value type matches the type of the intrinsic. Replace them all!
  for (CoroSubFnInst *I : Users)
    replaceAndRecursivelySimplify(I, Value);
}

// See if any operand of the call instruction references the coroutine frame.
static bool operandReferences(CallInst *CI, AllocaInst *Frame, AAResults &AA) {
  for (Value *Op : CI->operand_values())
    if (!AA.isNoAlias(Op, Frame))
      return true;
  return false;
}

// Look for any tail calls referencing the coroutine frame and remove the tail
// attribute from them, since the coroutine frame now resides on the stack and
// a tail call implies that the function does not reference anything on the
// stack. However, if it's a musttail call, we cannot remove the tail-call
// attribute. It's safe to keep it there as the musttail call is for symmetric
// transfer, and by that point the frame should have been destroyed and hence
// is not interfering with operands.
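//
// A sketch of the rewrite (names are illustrative):
//   %r = tail call i32 @use(i8* %frame.ptr) ; operand may alias the frame
// becomes
//   %r = call i32 @use(i8* %frame.ptr)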
static void removeTailCallAttribute(AllocaInst *Frame, AAResults &AA) {
  Function &F = *Frame->getFunction();
  for (Instruction &I : instructions(F))
    if (auto *Call = dyn_cast<CallInst>(&I))
      if (Call->isTailCall() && operandReferences(Call, Frame, AA) &&
          !Call->isMustTailCall())
        Call->setTailCall(false);
}

// Given a resume function @f.resume(%f.frame* %frame), returns the size
// and expected alignment of the %f.frame type.
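//
// For example (an illustrative sketch; the exact attribute set on a real
// resume function varies), given:
//   define internal void @f.resume(%f.frame* align 8 dereferenceable(40))
// this returns {40, Align(8)} from the attributes, without inspecting the
// frame type itself.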
static std::pair<uint64_t, Align> getFrameLayout(Function *Resume) {
  // Prefer to pull information from the function attributes.
  auto Size = Resume->getParamDereferenceableBytes(0);
  auto Align = Resume->getParamAlign(0);

  // If those aren't given, extract them from the type.
  if (Size == 0 || !Align) {
    auto *FrameTy = Resume->arg_begin()->getType()->getPointerElementType();

    const DataLayout &DL = Resume->getParent()->getDataLayout();
    if (!Size) Size = DL.getTypeAllocSize(FrameTy);
    if (!Align) Align = DL.getABITypeAlign(FrameTy);
  }

  return std::make_pair(Size, *Align);
}

// Finds the first non-alloca instruction in the entry block of a function.
static Instruction *getFirstNonAllocaInTheEntryBlock(Function *F) {
  for (Instruction &I : F->getEntryBlock())
    if (!isa<AllocaInst>(&I))
      return &I;
  llvm_unreachable("no terminator in the entry block");
}

// To elide heap allocations we need to suppress code blocks guarded by
// llvm.coro.alloc and llvm.coro.free instructions.
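//
// After elision, the start of the ramp function looks roughly like this
// (an illustrative sketch; the size N and alignment A come from
// getFrameLayout on the resume function):
//   %frame = alloca [N x i8], align A
//   %vFrame = bitcast [N x i8]* %frame to i8*
//   ; former coro.alloc uses see 'false'; former coro.begin uses see %vFrame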
void Lowerer::elideHeapAllocations(Function *F, uint64_t FrameSize,
                                   Align FrameAlign, AAResults &AA) {
  LLVMContext &C = F->getContext();
  auto *InsertPt =
      getFirstNonAllocaInTheEntryBlock(CoroIds.front()->getFunction());

  // Replacing llvm.coro.alloc with false will suppress dynamic allocation,
  // as the frontend is expected to generate code that looks like:
  //   id = coro.id(...)
  //   mem = coro.alloc(id) ? malloc(coro.size()) : 0;
  //   coro.begin(id, mem)
  auto *False = ConstantInt::getFalse(C);
  for (auto *CA : CoroAllocs) {
    CA->replaceAllUsesWith(False);
    CA->eraseFromParent();
  }

  // FIXME: Design how to transmit alignment information for every alloca that
  // is spilled into the coroutine frame and recreate the alignment information
  // here. Possibly we will need to do a mini SROA here and break the coroutine
  // frame into individual AllocaInst recreating the original alignment.
  const DataLayout &DL = F->getParent()->getDataLayout();
  auto FrameTy = ArrayType::get(Type::getInt8Ty(C), FrameSize);
  auto *Frame = new AllocaInst(FrameTy, DL.getAllocaAddrSpace(), "", InsertPt);
  Frame->setAlignment(FrameAlign);
  auto *FrameVoidPtr =
      new BitCastInst(Frame, Type::getInt8PtrTy(C), "vFrame", InsertPt);

  for (auto *CB : CoroBegins) {
    CB->replaceAllUsesWith(FrameVoidPtr);
    CB->eraseFromParent();
  }

  // Since the coroutine frame now lives on the stack, we need to make sure
  // that any tail call referencing it is made a non-tail call.
  removeTailCallAttribute(Frame, AA);
}

bool Lowerer::hasEscapePath(const CoroBeginInst *CB,
                            const SmallPtrSetImpl<BasicBlock *> &TIs) const {
  const auto &It = DestroyAddr.find(CB);
  assert(It != DestroyAddr.end());

  // Limit the number of blocks we visit.
  unsigned Limit = 32 * (1 + It->second.size());

  SmallVector<const BasicBlock *, 32> Worklist;
  Worklist.push_back(CB->getParent());

  SmallPtrSet<const BasicBlock *, 32> Visited;
  // Consider the basic blocks containing coro.destroy as visited, so that we
  // skip paths that pass through coro.destroy.
  for (auto *DA : It->second)
    Visited.insert(DA->getParent());

  do {
    const auto *BB = Worklist.pop_back_val();
    if (!Visited.insert(BB).second)
      continue;
    if (TIs.count(BB))
      return true;

    // Conservatively say that there is potentially a path.
    if (!--Limit)
      return true;

    auto TI = BB->getTerminator();
    // Although the default destination of a coro.suspend switch is the
    // suspend point, which implies an escape path to a normal terminator, it
    // is reasonable to skip it since the coroutine frame doesn't change
    // outside the coroutine body.
    if (isa<SwitchInst>(TI) &&
        CoroSuspendSwitches.count(cast<SwitchInst>(TI))) {
      Worklist.push_back(cast<SwitchInst>(TI)->getSuccessor(1));
      Worklist.push_back(cast<SwitchInst>(TI)->getSuccessor(2));
    } else
      Worklist.append(succ_begin(BB), succ_end(BB));

  } while (!Worklist.empty());

  // We have exhausted all possible paths and are certain that coro.begin
  // cannot reach any of the terminators.
  return false;
}

bool Lowerer::shouldElide(Function *F, DominatorTree &DT) const {
  // If no CoroAllocs, we cannot suppress allocation, so elision is not
  // possible.
  if (CoroAllocs.empty())
    return false;

  // Check that for every coro.begin there is at least one coro.destroy
  // directly referencing the SSA value of that coro.begin along each
  // non-exceptional path.
  // If the value escaped, then coro.destroy would have been referencing a
  // memory location storing that value and not the virtual register.
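  //
  // For example (an illustrative sketch), once the handle escapes to memory:
  //   store i8* %hdl, i8** %slot
  //   %reload = load i8*, i8** %slot
  //   ; the destroy path then uses %reload rather than the coro.begin value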

  SmallPtrSet<BasicBlock *, 8> Terminators;
  // First gather all of the non-exceptional terminators for the function.
  // Consider the final coro.suspend as the real terminator when the current
  // function is a coroutine.
  for (BasicBlock &B : *F) {
    auto *TI = B.getTerminator();
    if (TI->getNumSuccessors() == 0 && !TI->isExceptionalTerminator() &&
        !isa<UnreachableInst>(TI))
      Terminators.insert(&B);
  }

  // Filter out the coro.destroys that lie along exceptional paths.
  SmallPtrSet<CoroBeginInst *, 8> ReferencedCoroBegins;
  for (auto &It : DestroyAddr) {
    // If any coro.destroy dominates all of the terminators for the
    // coro.begin, we know the corresponding coro.begin cannot escape.
    for (Instruction *DA : It.second) {
      if (llvm::all_of(Terminators, [&](auto *TI) {
            return DT.dominates(DA, TI->getTerminator());
          })) {
        ReferencedCoroBegins.insert(It.first);
        break;
      }
    }

    // Otherwise, check whether there is any path from coro.begin to the
    // terminators that does not pass through any of the coro.destroys.
    //
    // hasEscapePath is relatively slow, so we avoid running it as much as
    // possible.
    if (!ReferencedCoroBegins.count(It.first) &&
        !hasEscapePath(It.first, Terminators))
      ReferencedCoroBegins.insert(It.first);
  }

  // If the size of the set is the same as the total number of coro.begins,
  // that means we found a coro.free or coro.destroy referencing each
  // coro.begin, so we can perform heap elision.
  return ReferencedCoroBegins.size() == CoroBegins.size();
}

void Lowerer::collectPostSplitCoroIds(Function *F) {
  CoroIds.clear();
  CoroSuspendSwitches.clear();
  for (auto &I : instructions(F)) {
    if (auto *CII = dyn_cast<CoroIdInst>(&I))
      if (CII->getInfo().isPostSplit())
        // If it is the coroutine itself, don't touch it.
        if (CII->getCoroutine() != CII->getFunction())
          CoroIds.push_back(CII);

    // Consider a case like:
    //   %0 = call i8 @llvm.coro.suspend(...)
    //   switch i8 %0, label %suspend [i8 0, label %resume
    //                                 i8 1, label %cleanup]
    // and collect the SwitchInsts which are used by escape analysis later.
    if (auto *CSI = dyn_cast<CoroSuspendInst>(&I))
      if (CSI->hasOneUse() && isa<SwitchInst>(CSI->use_begin()->getUser())) {
        SwitchInst *SWI = cast<SwitchInst>(CSI->use_begin()->getUser());
        if (SWI->getNumCases() == 2)
          CoroSuspendSwitches.insert(SWI);
      }
  }
}

bool Lowerer::processCoroId(CoroIdInst *CoroId, AAResults &AA,
                            DominatorTree &DT) {
  CoroBegins.clear();
  CoroAllocs.clear();
  CoroFrees.clear();
  ResumeAddr.clear();
  DestroyAddr.clear();

  // Collect all coro.begins and coro.allocs associated with this coro.id.
  for (User *U : CoroId->users()) {
    if (auto *CB = dyn_cast<CoroBeginInst>(U))
      CoroBegins.push_back(CB);
    else if (auto *CA = dyn_cast<CoroAllocInst>(U))
      CoroAllocs.push_back(CA);
    else if (auto *CF = dyn_cast<CoroFreeInst>(U))
      CoroFrees.push_back(CF);
  }

  // Collect all coro.subfn.addrs associated with coro.begin.
  // Note, we only devirtualize the calls if their coro.subfn.addr refers to
  // coro.begin directly. If we run into cases where this check is too
  // conservative, we can consider relaxing the check.
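  //
  // For example (an illustrative IR sketch), this pattern qualifies:
  //   %hdl = call i8* @llvm.coro.begin(token %id, i8* %mem)
  //   %addr = call i8* @llvm.coro.subfn.addr(i8* %hdl, i8 1) ; DestroyIndex
  // because %addr takes the coro.begin result %hdl directly as its operand.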
  for (CoroBeginInst *CB : CoroBegins) {
    for (User *U : CB->users())
      if (auto *II = dyn_cast<CoroSubFnInst>(U))
        switch (II->getIndex()) {
        case CoroSubFnInst::ResumeIndex:
          ResumeAddr.push_back(II);
          break;
        case CoroSubFnInst::DestroyIndex:
          DestroyAddr[CB].push_back(II);
          break;
        default:
          llvm_unreachable("unexpected coro.subfn.addr constant");
        }
  }

  // PostSplit coro.id refers to an array of subfunctions in its Info
  // argument.
  ConstantArray *Resumers = CoroId->getInfo().Resumers;
  assert(Resumers && "PostSplit coro.id Info argument must refer to an array "
                     "of coroutine subfunctions");
  auto *ResumeAddrConstant =
      ConstantExpr::getExtractValue(Resumers, CoroSubFnInst::ResumeIndex);

  replaceWithConstant(ResumeAddrConstant, ResumeAddr);

  bool ShouldElide = shouldElide(CoroId->getFunction(), DT);

  auto *DestroyAddrConstant = ConstantExpr::getExtractValue(
      Resumers,
      ShouldElide ? CoroSubFnInst::CleanupIndex : CoroSubFnInst::DestroyIndex);

  for (auto &It : DestroyAddr)
    replaceWithConstant(DestroyAddrConstant, It.second);

  if (ShouldElide) {
    auto FrameSizeAndAlign = getFrameLayout(cast<Function>(ResumeAddrConstant));
    elideHeapAllocations(CoroId->getFunction(), FrameSizeAndAlign.first,
                         FrameSizeAndAlign.second, AA);
    coro::replaceCoroFree(CoroId, /*Elide=*/true);
  }

  return true;
}

// See if there are any coro.subfn.addr instructions referring to the
// coro.devirt trigger; if so, replace them with a direct call to the devirt
// trigger function.
static bool replaceDevirtTrigger(Function &F) {
  SmallVector<CoroSubFnInst *, 1> DevirtAddr;
  for (auto &I : instructions(F))
    if (auto *SubFn = dyn_cast<CoroSubFnInst>(&I))
      if (SubFn->getIndex() == CoroSubFnInst::RestartTrigger)
        DevirtAddr.push_back(SubFn);

  if (DevirtAddr.empty())
    return false;

  Module &M = *F.getParent();
  Function *DevirtFn = M.getFunction(CORO_DEVIRT_TRIGGER_FN);
  assert(DevirtFn && "coro.devirt.fn not found");
  replaceWithConstant(DevirtFn, DevirtAddr);

  return true;
}

static bool declaresCoroElideIntrinsics(Module &M) {
  return coro::declaresIntrinsics(M, {"llvm.coro.id", "llvm.coro.id.async"});
}

PreservedAnalyses CoroElidePass::run(Function &F, FunctionAnalysisManager &AM) {
  auto &M = *F.getParent();
  if (!declaresCoroElideIntrinsics(M))
    return PreservedAnalyses::all();

  Lowerer L(M);
  L.CoroIds.clear();
  L.collectPostSplitCoroIds(&F);
  // If we did not find any coro.id, there is nothing to do.
  if (L.CoroIds.empty())
    return PreservedAnalyses::all();

  AAResults &AA = AM.getResult<AAManager>(F);
  DominatorTree &DT = AM.getResult<DominatorTreeAnalysis>(F);

  bool Changed = false;
  for (auto *CII : L.CoroIds)
    Changed |= L.processCoroId(CII, AA, DT);

  return Changed ? PreservedAnalyses::none() : PreservedAnalyses::all();
}

namespace {
struct CoroElideLegacy : FunctionPass {
  static char ID;
  CoroElideLegacy() : FunctionPass(ID) {
    initializeCoroElideLegacyPass(*PassRegistry::getPassRegistry());
  }

  std::unique_ptr<Lowerer> L;

  bool doInitialization(Module &M) override {
    if (declaresCoroElideIntrinsics(M))
      L = std::make_unique<Lowerer>(M);
    return false;
  }

  bool runOnFunction(Function &F) override {
    if (!L)
      return false;

    bool Changed = false;

    if (F.hasFnAttribute(CORO_PRESPLIT_ATTR))
      Changed = replaceDevirtTrigger(F);

    L->CoroIds.clear();
    L->collectPostSplitCoroIds(&F);
    // If we did not find any coro.id, there is nothing to do.
    if (L->CoroIds.empty())
      return Changed;

    AAResults &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
    DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();

    for (auto *CII : L->CoroIds)
      Changed |= L->processCoroId(CII, AA, DT);

    return Changed;
  }
  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AAResultsWrapperPass>();
    AU.addRequired<DominatorTreeWrapperPass>();
  }
  StringRef getPassName() const override { return "Coroutine Elision"; }
};
}

char CoroElideLegacy::ID = 0;
INITIALIZE_PASS_BEGIN(
    CoroElideLegacy, "coro-elide",
    "Coroutine frame allocation elision and indirect calls replacement", false,
    false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_END(
    CoroElideLegacy, "coro-elide",
    "Coroutine frame allocation elision and indirect calls replacement", false,
    false)

Pass *llvm::createCoroElideLegacyPass() { return new CoroElideLegacy(); }