/* Analysis used by inlining decision heuristics.
   Copyright (C) 2003-2019 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "print-tree.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "cfganal.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-ssa-loop.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "ipa-inline.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "ipa-utils.h"
#include "cfgexpand.h"
#include "gimplify.h"

/* Cached node/edge growths.  */
call_summary<edge_growth_cache_entry *> *edge_growth_cache = NULL;

/* Give the initial reasons why inlining would fail on EDGE.  These get
   either nullified or usually overwritten by more precise reasons later.  */

void
initialize_inline_failed (struct cgraph_edge *e)
{
  struct cgraph_node *callee = e->callee;

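  /* Preserve an already recorded final error, except CIF_BODY_NOT_AVAILABLE,
     which is re-evaluated by the chain below.  */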
  if (e->inline_failed && e->inline_failed != CIF_BODY_NOT_AVAILABLE
      && cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR)
    ;
  else if (e->indirect_unknown_callee)
    e->inline_failed = CIF_INDIRECT_UNKNOWN_CALL;
  else if (!callee->definition)
    e->inline_failed = CIF_BODY_NOT_AVAILABLE;
  else if (callee->local.redefined_extern_inline)
    e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
  else
    e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
  gcc_checking_assert (!e->call_stmt_cannot_inline_p
		       || cgraph_inline_failed_type (e->inline_failed)
			    == CIF_FINAL_ERROR);
}


/* Free growth caches.  */

void
free_growth_caches (void)
{
  delete edge_growth_cache;
  edge_growth_cache = NULL;
}

/* Return hints derived from EDGE.  */

int
simple_edge_hints (struct cgraph_edge *edge)
{
  int hints = 0;
  struct cgraph_node *to = (edge->caller->global.inlined_to
			    ? edge->caller->global.inlined_to : edge->caller);
  struct cgraph_node *callee = edge->callee->ultimate_alias_target ();
  int to_scc_no = ipa_fn_summaries->get (to)->scc_no;
  int callee_scc_no = ipa_fn_summaries->get (callee)->scc_no;

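  /* Flag edges that stay within one strongly connected component of the
     callgraph (mutual recursion); direct self-recursion is excluded here.  */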
  if (to_scc_no && to_scc_no == callee_scc_no && !edge->recursive_p ())
    hints |= INLINE_HINT_same_scc;

  if (callee->lto_file_data && edge->caller->lto_file_data
      && edge->caller->lto_file_data != callee->lto_file_data
      && !callee->merged_comdat && !callee->icf_merged)
    hints |= INLINE_HINT_cross_module;

  return hints;
}

/* Estimate the time cost for the caller when inlining EDGE.
   Only to be called via estimate_edge_time, which handles the
   caching mechanism.

   When caching, also update the cache entry.  Compute both time and
   size, since we always need both metrics eventually.  */

sreal
do_estimate_edge_time (struct cgraph_edge *edge)
{
  sreal time, nonspec_time;
  int size;
  ipa_hints hints;
  struct cgraph_node *callee;
  clause_t clause, nonspec_clause;
  vec<tree> known_vals;
  vec<ipa_polymorphic_call_context> known_contexts;
  vec<ipa_agg_jump_function_p> known_aggs;
  struct ipa_call_summary *es = ipa_call_summaries->get (edge);
  int min_size;

  callee = edge->callee->ultimate_alias_target ();

  gcc_checking_assert (edge->inline_failed);
  evaluate_properties_for_edge (edge, true,
				&clause, &nonspec_clause, &known_vals,
				&known_contexts, &known_aggs);
  estimate_node_size_and_time (callee, clause, nonspec_clause, known_vals,
			       known_contexts, known_aggs, &size, &min_size,
			       &time, &nonspec_time, &hints, es->param);

  /* When we have profile feedback, we can quite safely identify hot
     edges and for those we disable size limits.  Don't do that when the
     probability that the caller will call the callee is low, however,
     since it may hurt optimization of the caller's hot path.  */
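  /* "Hot enough" here means that the edge executes more than half as often
     as the (outermost) caller itself, i.e. 2 * edge count > caller count.  */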
  if (edge->count.ipa ().initialized_p () && edge->maybe_hot_p ()
      && (edge->count.ipa ().apply_scale (2, 1)
	  > (edge->caller->global.inlined_to
	     ? edge->caller->global.inlined_to->count.ipa ()
	     : edge->caller->count.ipa ())))
    hints |= INLINE_HINT_known_hot;

  known_vals.release ();
  known_contexts.release ();
  known_aggs.release ();
  gcc_checking_assert (size >= 0);
  gcc_checking_assert (time >= 0);

  /* When caching, update the cache entry.  */
  if (edge_growth_cache != NULL)
    {
      ipa_fn_summaries->get_create (edge->callee)->min_size = min_size;
      edge_growth_cache_entry *entry
	= edge_growth_cache->get_create (edge);
      entry->time = time;
      entry->nonspec_time = nonspec_time;

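      /* The size and hints are stored with a bias of one so that a cached
	 value of zero can be told apart from "not yet computed"; the bias
	 is removed again on lookup (see do_estimate_edge_size and
	 do_estimate_edge_hints).  */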
      entry->size = size + (size >= 0);
      hints |= simple_edge_hints (edge);
      entry->hints = hints + 1;
    }
  return time;
}


/* Return estimated callee growth after inlining EDGE.
   Only to be called via estimate_edge_size.  */

int
do_estimate_edge_size (struct cgraph_edge *edge)
{
  int size;
  struct cgraph_node *callee;
  clause_t clause, nonspec_clause;
  vec<tree> known_vals;
  vec<ipa_polymorphic_call_context> known_contexts;
  vec<ipa_agg_jump_function_p> known_aggs;

  /* When we do caching, use do_estimate_edge_time to populate the entry.  */

  if (edge_growth_cache != NULL)
    {
      do_estimate_edge_time (edge);
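      /* The cached size is biased by one (see do_estimate_edge_time), so a
	 populated entry is guaranteed to be nonzero; remove the bias.  */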
      size = edge_growth_cache->get (edge)->size;
      gcc_checking_assert (size);
      return size - (size > 0);
    }

  callee = edge->callee->ultimate_alias_target ();

  /* The early inliner runs without caching; go ahead and do the dirty work.  */
  gcc_checking_assert (edge->inline_failed);
  evaluate_properties_for_edge (edge, true,
				&clause, &nonspec_clause,
				&known_vals, &known_contexts,
				&known_aggs);
  estimate_node_size_and_time (callee, clause, nonspec_clause, known_vals,
			       known_contexts, known_aggs, &size, NULL, NULL,
			       NULL, NULL, vNULL);
  known_vals.release ();
  known_contexts.release ();
  known_aggs.release ();
  return size;
}


/* Estimate the inline hints for the caller when inlining EDGE.
   Only to be called via estimate_edge_hints.  */

ipa_hints
do_estimate_edge_hints (struct cgraph_edge *edge)
{
  ipa_hints hints;
  struct cgraph_node *callee;
  clause_t clause, nonspec_clause;
  vec<tree> known_vals;
  vec<ipa_polymorphic_call_context> known_contexts;
  vec<ipa_agg_jump_function_p> known_aggs;

  /* When we do caching, use do_estimate_edge_time to populate the entry.  */

  if (edge_growth_cache != NULL)
    {
      do_estimate_edge_time (edge);
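      /* The cached hints are likewise stored with a bias of one (see
	 do_estimate_edge_time), so a populated entry is nonzero.  */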
      hints = edge_growth_cache->get (edge)->hints;
      gcc_checking_assert (hints);
      return hints - 1;
    }

  callee = edge->callee->ultimate_alias_target ();

  /* The early inliner runs without caching; go ahead and do the dirty work.  */
  gcc_checking_assert (edge->inline_failed);
  evaluate_properties_for_edge (edge, true,
				&clause, &nonspec_clause,
				&known_vals, &known_contexts,
				&known_aggs);
  estimate_node_size_and_time (callee, clause, nonspec_clause, known_vals,
			       known_contexts, known_aggs, NULL, NULL,
			       NULL, NULL, &hints, vNULL);
  known_vals.release ();
  known_contexts.release ();
  known_aggs.release ();
  hints |= simple_edge_hints (edge);
  return hints;
}

/* Estimate the size of NODE after inlining EDGE, which should be an
   edge to either NODE or a call inlined into NODE.  */

int
estimate_size_after_inlining (struct cgraph_node *node,
			      struct cgraph_edge *edge)
{
  struct ipa_call_summary *es = ipa_call_summaries->get (edge);
  ipa_fn_summary *s = ipa_fn_summaries->get (node);
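  /* If the call is known to be optimized out in this context (its predicate
     evaluates to false), inlining it adds nothing to the size.  */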
  if (!es->predicate || *es->predicate != false)
    {
      int size = s->size + estimate_edge_growth (edge);
      gcc_assert (size >= 0);
      return size;
    }
  return s->size;
}


struct growth_data
{
  struct cgraph_node *node;
  bool self_recursive;
  bool uninlinable;
  int growth;
};


/* Worker for estimate_growth.  Collect growth for all callers.  */

static bool
do_estimate_growth_1 (struct cgraph_node *node, void *data)
{
  struct cgraph_edge *e;
  struct growth_data *d = (struct growth_data *) data;

  for (e = node->callers; e; e = e->next_caller)
    {
      gcc_checking_assert (e->inline_failed);

      if (cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR
	  || !opt_for_fn (e->caller->decl, optimize))
	{
	  d->uninlinable = true;
	  continue;
	}

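      /* Self-recursive edges are only noted here; estimate_growth later
	 bumps the result to at least the whole body size for them.  */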
      if (e->recursive_p ())
	{
	  d->self_recursive = true;
	  continue;
	}
      d->growth += estimate_edge_growth (e);
    }
  return false;
}


/* Estimate the growth caused by inlining NODE into all callers.  */

int
estimate_growth (struct cgraph_node *node)
{
  struct growth_data d = { node, false, false, 0 };
  struct ipa_fn_summary *info = ipa_fn_summaries->get (node);

  node->call_for_symbol_and_aliases (do_estimate_growth_1, &d, true);

  /* For self-recursive functions the growth estimation really should be
     infinity.  We don't want to return very large values because the growth
     plays various roles in badness computation fractions.  Be sure not to
     return zero or negative growth.  */
  if (d.self_recursive)
    d.growth = d.growth < info->size ? info->size : d.growth;
  else if (DECL_EXTERNAL (node->decl) || d.uninlinable)
    ;
  else
    {
      if (node->will_be_removed_from_program_if_no_direct_calls_p ())
	d.growth -= info->size;
      /* COMDAT functions are very often not shared across multiple units
	 since they come from various template instantiations.
	 Take this into account.  */
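      /* Expected saving: the body size scaled by the chance that this unit
	 ends up with the only copy (100 - PARAM_COMDAT_SHARING_PROBABILITY,
	 in percent); the +50 rounds to the nearest unit.  */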
      else if (DECL_COMDAT (node->decl)
	       && node->can_remove_if_no_direct_calls_p ())
	d.growth -= (info->size
		     * (100 - PARAM_VALUE (PARAM_COMDAT_SHARING_PROBABILITY))
		     + 50) / 100;
    }

  return d.growth;
}

/* Return true if NODE or one of its aliases cannot be removed even without
   direct calls, has a caller that cannot be inlined, or has at least
   *MAX_CALLERS callers; *MAX_CALLERS is decremented for every caller
   visited.  */

static bool
check_callers (cgraph_node *node, int *max_callers)
{
  ipa_ref *ref;

  if (!node->can_remove_if_no_direct_calls_and_refs_p ())
    return true;

  for (cgraph_edge *e = node->callers; e; e = e->next_caller)
    {
      (*max_callers)--;
      if (!*max_callers
	  || cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR)
	return true;
    }

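  /* Callers of aliases reach the node as well, so they count against the
     same budget.  */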
  FOR_EACH_ALIAS (node, ref)
    if (check_callers (dyn_cast <cgraph_node *> (ref->referring), max_callers))
      return true;

  return false;
}


/* Make a cheap estimation of whether the growth of NODE is likely positive,
   knowing EDGE_GROWTH of one particular edge.
   We assume that most other edges will have similar growth
   and skip the computation if there are too many callers.  */

bool
growth_likely_positive (struct cgraph_node *node,
		        int edge_growth)
{
  int max_callers;
  struct cgraph_edge *e;
  gcc_checking_assert (edge_growth > 0);

  /* First quickly check if NODE is removable at all.  */
  if (DECL_EXTERNAL (node->decl))
    return true;
  if (!node->can_remove_if_no_direct_calls_and_refs_p ()
      || node->address_taken)
    return true;

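  /* Bound the number of callers we are willing to examine: if each of more
     than SIZE * 4 / EDGE_GROWTH callers grows by roughly EDGE_GROWTH, the
     total growth dwarfs whatever removing the offline body could save.  */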
  max_callers = ipa_fn_summaries->get (node)->size * 4 / edge_growth + 2;

  for (e = node->callers; e; e = e->next_caller)
    {
      max_callers--;
      if (!max_callers
	  || cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR)
	return true;
    }

  ipa_ref *ref;
  FOR_EACH_ALIAS (node, ref)
    if (check_callers (dyn_cast <cgraph_node *> (ref->referring), &max_callers))
      return true;

  /* Unlike for functions called once, we play it unsafe with
     COMDATs.  We can allow that since we know the functions under
     consideration are small (and thus the risk is small), and moreover
     the growth estimate already accounts for the fact that COMDAT
     functions may or may not disappear when eliminated from the
     current unit.  With good probability, making the aggressive
     choice in all units is going to make the overall program
     smaller.  */
  if (DECL_COMDAT (node->decl))
    {
      if (!node->can_remove_if_no_direct_calls_p ())
	return true;
    }
  else if (!node->will_be_removed_from_program_if_no_direct_calls_p ())
    return true;

  return estimate_growth (node) > 0;
}
432