//===- ModuleInliner.cpp - Code related to module inliner -----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the mechanics required to implement inlining without
// missing any calls at the module level. Unlike the SCC inliner, it does not
// need any information about SCCs or the call graph. The decisions about
// which calls are profitable to inline are implemented elsewhere.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/ModuleInliner.h"
#include "llvm/ADT/ScopeExit.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/InlineAdvisor.h"
#include "llvm/Analysis/InlineCost.h"
#include "llvm/Analysis/InlineOrder.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/ReplayInlineAdvisor.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/PassManager.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/CallPromotionUtils.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include <cassert>

using namespace llvm;

#define DEBUG_TYPE "module-inline"

STATISTIC(NumInlined, "Number of functions inlined");
STATISTIC(NumDeleted, "Number of functions deleted because all callers found");

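// Note: the option below is a hidden cl::opt, so it is passed directly on the
// tool's command line rather than through the pass pipeline string. A typical
// invocation (assuming the pass is registered as "module-inline" in the new
// pass manager) might look like:
//   opt -passes=module-inline -inline-priority-mode=cost input.ll -S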
static cl::opt<InlinePriorityMode> UseInlinePriority(
    "inline-priority-mode", cl::init(InlinePriorityMode::Size), cl::Hidden,
    cl::desc("Choose the priority mode to use in module inlining"),
    cl::values(clEnumValN(InlinePriorityMode::NoPriority, "no-priority",
                          "Use no priority, visit callsites bottom-up."),
               clEnumValN(InlinePriorityMode::Size, "size",
                          "Use callee size priority."),
               clEnumValN(InlinePriorityMode::Cost, "cost",
                          "Use inline cost priority.")));

/// Return true if the specified inline history ID
/// indicates an inline history that includes the specified function.
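///
/// The history forms a parent chain of {Function, previous ID} entries. For
/// example, if callee B is inlined at a call carrying ID -1, entry 0 = {B, -1}
/// is created and the calls cloned from B carry ID 0; if one of those calls
/// (now calling C) is inlined next, entry 1 = {C, 0} is created and its clones
/// carry ID 1. Walking ID 1 visits C, then B, then stops at -1, which is how
/// repeated inlining of the same function along one chain is detected.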
static bool inlineHistoryIncludes(
    Function *F, int InlineHistoryID,
    const SmallVectorImpl<std::pair<Function *, int>> &InlineHistory) {
  while (InlineHistoryID != -1) {
    assert(unsigned(InlineHistoryID) < InlineHistory.size() &&
           "Invalid inline history ID");
    if (InlineHistory[InlineHistoryID].first == F)
      return true;
    InlineHistoryID = InlineHistory[InlineHistoryID].second;
  }
  return false;
}

InlineAdvisor &ModuleInlinerPass::getAdvisor(const ModuleAnalysisManager &MAM,
                                             FunctionAnalysisManager &FAM,
                                             Module &M) {
  if (OwnedAdvisor)
    return *OwnedAdvisor;

  auto *IAA = MAM.getCachedResult<InlineAdvisorAnalysis>(M);
  if (!IAA) {
    // It should still be possible to run the inliner as a stand-alone module
    // pass, for test scenarios. In that case, we default to the
    // DefaultInlineAdvisor, which doesn't need to keep state between module
    // pass runs. It also uses just the default InlineParams. In this case, we
    // need to use the provided FAM, which is valid for the duration of the
    // inliner pass, and thus the lifetime of the owned advisor. The one we
    // would get from the MAM can be invalidated as a result of the inliner's
    // activity.
    OwnedAdvisor = std::make_unique<DefaultInlineAdvisor>(
        M, FAM, Params,
        InlineContext{LTOPhase, InlinePass::ModuleInliner});

    return *OwnedAdvisor;
  }
  assert(IAA->getAdvisor() &&
         "Expected a present InlineAdvisorAnalysis to also have an "
         "InlineAdvisor initialized");
  return *IAA->getAdvisor();
}

static bool isKnownLibFunction(Function &F, TargetLibraryInfo &TLI) {
  LibFunc LF;

  // Either this is a normal library function or a "vectorizable"
  // function.  Not using the VFDatabase here because this query
  // is related only to libraries handled via the TLI.
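  // For example (illustrative only, the exact set depends on the configured
  // TargetLibraryInfo): a function named "memcpy" would be matched by
  // getLibFunc(), while a routine listed in the target's vector-function
  // tables would be matched by isKnownVectorFunctionInLibrary().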
  return TLI.getLibFunc(F, LF) ||
         TLI.isKnownVectorFunctionInLibrary(F.getName());
}

PreservedAnalyses ModuleInlinerPass::run(Module &M,
                                         ModuleAnalysisManager &MAM) {
  LLVM_DEBUG(dbgs() << "---- Module Inliner is Running ----\n");

  auto &IAA = MAM.getResult<InlineAdvisorAnalysis>(M);
  if (!IAA.tryCreate(
          Params, Mode, {},
          InlineContext{LTOPhase, InlinePass::ModuleInliner})) {
    M.getContext().emitError(
        "Could not set up the Inlining Advisor for the requested "
        "mode and/or options");
    return PreservedAnalyses::all();
  }

  bool Changed = false;

  ProfileSummaryInfo *PSI = MAM.getCachedResult<ProfileSummaryAnalysis>(M);

  FunctionAnalysisManager &FAM =
      MAM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();

  auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
    return FAM.getResult<TargetLibraryAnalysis>(F);
  };

  InlineAdvisor &Advisor = getAdvisor(MAM, FAM, M);
  Advisor.onPassEntry();

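  // Pair the onPassEntry() call above with an onPassExit() that runs on every
  // path out of this function.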
  auto AdvisorOnExit = make_scope_exit([&] { Advisor.onPassExit(); });

  // In the module inliner, a priority-based worklist is used for calls across
  // the entire module. With this module inliner, the inline order is not
  // limited to bottom-up order; a more globally scoped inline order is
  // enabled. Also, the inline deferral logic becomes unnecessary in this
  // module inliner. It is possible to use other priority heuristics, e.g. a
  // profile-based heuristic.
  //
  // TODO: There is a huge amount of duplicated code between the module
  // inliner and the SCC inliner, which needs some refactoring.
  auto Calls = getInlineOrder(UseInlinePriority, FAM, Params);
  assert(Calls != nullptr && "Expected an initialized InlineOrder");
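  // Each worklist element is a {CallBase *, InlineHistoryID} pair; an ID of
  // -1 marks a call site that existed before any inlining done by this pass.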

  // Populate the initial list of calls in this module.
  for (Function &F : M) {
    auto &ORE = FAM.getResult<OptimizationRemarkEmitterAnalysis>(F);
    // We want to generally process call sites top-down in order for
    // simplifications stemming from replacing the call with the returned value
    // after inlining to be visible to subsequent inlining decisions.
    // FIXME: Using the instruction sequence is a really bad way to do this.
    // Instead we should do an actual RPO walk of the function body.
    for (Instruction &I : instructions(F))
      if (auto *CB = dyn_cast<CallBase>(&I))
        if (Function *Callee = CB->getCalledFunction()) {
          if (!Callee->isDeclaration())
            Calls->push({CB, -1});
          else if (!isa<IntrinsicInst>(I)) {
            using namespace ore;
            setInlineRemark(*CB, "unavailable definition");
            ORE.emit([&]() {
              return OptimizationRemarkMissed(DEBUG_TYPE, "NoDefinition", &I)
                     << NV("Callee", Callee) << " will not be inlined into "
                     << NV("Caller", CB->getCaller())
                     << " because its definition is unavailable"
                     << setIsVerbose();
            });
          }
        }
  }
  if (Calls->empty())
    return PreservedAnalyses::all();

  // When inlining a callee produces new call sites, we want to keep track of
  // the fact that they were inlined from the callee.  This allows us to avoid
  // infinite inlining in some obscure cases.  To represent this, we use an
  // index into the InlineHistory vector.
  SmallVector<std::pair<Function *, int>, 16> InlineHistory;

  // Track the dead functions to delete once finished with inlining calls. We
  // defer deleting these to make it easier to handle the call graph updates.
  SmallVector<Function *, 4> DeadFunctions;

  // Loop forward over all of the calls.
  while (!Calls->empty()) {
    // We expect the calls to typically be batched with sequences of calls that
    // have the same caller, so we first set up some shared infrastructure for
    // this caller. We also do any pruning we can at this layer on the caller
    // alone.
    Function &F = *Calls->front().first->getCaller();

    LLVM_DEBUG(dbgs() << "Inlining calls in: " << F.getName() << "\n"
                      << "    Function size: " << F.getInstructionCount()
                      << "\n");

    auto GetAssumptionCache = [&](Function &F) -> AssumptionCache & {
      return FAM.getResult<AssumptionAnalysis>(F);
    };

    // Now process as many calls as we have within this caller in the sequence.
    // We bail out as soon as the caller has to change so we can
    // prepare the context of that new caller.
    bool DidInline = false;
    auto P = Calls->pop();
    CallBase *CB = P.first;
    const int InlineHistoryID = P.second;
    Function &Callee = *CB->getCalledFunction();

    if (InlineHistoryID != -1 &&
        inlineHistoryIncludes(&Callee, InlineHistoryID, InlineHistory)) {
      setInlineRemark(*CB, "recursive");
      continue;
    }

    auto Advice = Advisor.getAdvice(*CB, /*OnlyMandatory*/ false);
    // Check whether we want to inline this callsite.
    if (!Advice->isInliningRecommended()) {
      Advice->recordUnattemptedInlining();
      continue;
    }

    // Set up the data structure used to plumb customization into the
    // `InlineFunction` routine.
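    // Passing the caller's and callee's BlockFrequencyInfo here lets
    // InlineFunction keep the caller's block frequencies (and hence profile
    // data) up to date for the code it clones into the caller.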
    InlineFunctionInfo IFI(
        /*cg=*/nullptr, GetAssumptionCache, PSI,
        &FAM.getResult<BlockFrequencyAnalysis>(*(CB->getCaller())),
        &FAM.getResult<BlockFrequencyAnalysis>(Callee));

    InlineResult IR =
        InlineFunction(*CB, IFI, &FAM.getResult<AAManager>(*CB->getCaller()));
    if (!IR.isSuccess()) {
      Advice->recordUnsuccessfulInlining(IR);
      continue;
    }

    DidInline = true;
    ++NumInlined;

    LLVM_DEBUG(dbgs() << "    Size after inlining: " << F.getInstructionCount()
                      << "\n");

    // Add any new callsites to defined functions to the worklist.
    if (!IFI.InlinedCallSites.empty()) {
      int NewHistoryID = InlineHistory.size();
      InlineHistory.push_back({&Callee, InlineHistoryID});

      for (CallBase *ICB : reverse(IFI.InlinedCallSites)) {
        Function *NewCallee = ICB->getCalledFunction();
        if (!NewCallee) {
          // Try to promote an indirect (virtual) call without waiting for
          // the post-inline cleanup and the next DevirtSCCRepeatedPass
          // iteration because the next iteration may not happen and we may
          // miss inlining it.
          if (tryPromoteCall(*ICB))
            NewCallee = ICB->getCalledFunction();
        }
        if (NewCallee)
          if (!NewCallee->isDeclaration())
            Calls->push({ICB, NewHistoryID});
      }
    }

    // Merge the attributes based on the inlining.
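    // (For example, attributes that must stay conservative after inlining,
    // such as the caller's stack-protector level, are reconciled here; see
    // AttributeFuncs::mergeAttributesForInlining for the full set.)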
    AttributeFuncs::mergeAttributesForInlining(F, Callee);

    // For local functions, check whether this makes the callee trivially
    // dead. In that case, we can drop the body of the function eagerly
    // which may reduce the number of callers of other functions to one,
    // changing inline cost thresholds.
    bool CalleeWasDeleted = false;
    if (Callee.hasLocalLinkage()) {
      // To check this we also need to nuke any dead constant uses (perhaps
      // made dead by this operation on other functions).
      Callee.removeDeadConstantUsers();
      if (Callee.use_empty() && !isKnownLibFunction(Callee, GetTLI(Callee))) {
        Calls->erase_if([&](const std::pair<CallBase *, int> &Call) {
          return Call.first->getCaller() == &Callee;
        });
        // Clear the body and queue the function itself for deletion when we
        // finish inlining.
        // Note that after this point, it is an error to do anything other
        // than use the callee's address or delete it.
        Callee.dropAllReferences();
        assert(!is_contained(DeadFunctions, &Callee) &&
               "Cannot cause a function to become dead twice!");
        DeadFunctions.push_back(&Callee);
        CalleeWasDeleted = true;
      }
    }
    if (CalleeWasDeleted)
      Advice->recordInliningWithCalleeDeleted();
    else
      Advice->recordInlining();

    if (!DidInline)
      continue;
    Changed = true;
  }

  // Now that we've finished inlining all of the calls across this module,
  // delete all of the trivially dead functions.
  //
  // Note that the order in which these functions are visited does not matter:
  // all we do here is drop cached analyses and erase the functions from the
  // module.
  for (Function *DeadF : DeadFunctions) {
    // Clear out any cached analyses.
    FAM.clear(*DeadF, DeadF->getName());

    // And delete the actual function from the module.
    M.getFunctionList().erase(DeadF);

    ++NumDeleted;
  }

  if (!Changed)
    return PreservedAnalyses::all();

  return PreservedAnalyses::none();
}