/* Callgraph transformations to handle inlining
   Copyright (C) 2003-2017 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The inline decisions are stored in the callgraph as an "inline plan" and
   applied later.

   To mark a given call inlined, use the inline_call function.
   The function marks the edge inlinable and, if necessary, produces a
   virtual clone in the callgraph representing the new copy of the callee's
   function body.

   The inline plan is applied to a given function body by inline_transform.  */
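
/* A minimal sketch of how the two phases fit together (illustrative only;
   the real decision and transform drivers live in ipa-inline.c, and the
   exact call sites there differ):

     // decision phase: record an edge in the inline plan
     if (e->inline_failed && want_inline_small_function_p (e, true))
       inline_call (e, true, &new_edges, &overall_size, true, NULL);

     // transform phase: apply the recorded plan to one function body
     todo = inline_transform (node);

   want_inline_small_function_p is the small-function heuristic in
   ipa-inline.c; treat the snippet as an assumed usage pattern, not as the
   actual driver code.  */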

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "function.h"
#include "tree.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-inline.h"
#include "tree-inline.h"

int ncalls_inlined;
int nfunctions_inlined;

/* Scale the frequency of NODE's edges by FREQ_SCALE.  */

static void
update_noncloned_frequencies (struct cgraph_node *node,
                              int freq_scale)
{
  struct cgraph_edge *e;

  /* We do not want to ignore a high loop nest after the frequency drops
     to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  for (e = node->callees; e; e = e->next_callee)
    {
      e->frequency = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
      if (e->frequency > CGRAPH_FREQ_MAX)
        e->frequency = CGRAPH_FREQ_MAX;
      if (!e->inline_failed)
        update_noncloned_frequencies (e->callee, freq_scale);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      e->frequency = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
      if (e->frequency > CGRAPH_FREQ_MAX)
        e->frequency = CGRAPH_FREQ_MAX;
    }
}
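
/* Worked example for the scaling above (assuming CGRAPH_FREQ_BASE == 1000
   and CGRAPH_FREQ_MAX == 100000, their values in cgraph.h): inlining into
   a call site with freq_scale 2000 rescales an edge of frequency 1500 to
   1500 * 2000 / 1000 == 3000, i.e. the callee's own calls are now expected
   to execute twice as often; any result above 100000 would be clamped to
   CGRAPH_FREQ_MAX.  */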

/* We removed or are going to remove the last call to NODE.
   Return true if we can and want to proactively remove NODE now.
   This is important to do, since we want the inliner to know when the
   offline copy of a function was removed.  */

static bool
can_remove_node_now_p_1 (struct cgraph_node *node, struct cgraph_edge *e)
{
  ipa_ref *ref;

  FOR_EACH_ALIAS (node, ref)
    {
      cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
      if ((alias->callers && alias->callers != e)
          || !can_remove_node_now_p_1 (alias, e))
        return false;
    }
  /* FIXME: When the address of a DECL_EXTERNAL function is taken, we can
     still remove its offline copy, but we would need to keep an unanalyzed
     node in the callgraph so references can point to it.

     Also for a comdat group we can ignore references inside the group, as
     we want to prove the group as a whole to be dead.  */
  return (!node->address_taken
          && node->can_remove_if_no_direct_calls_and_refs_p ()
          /* Inlining might enable more devirtualization, so we want to remove
             those only after all devirtualizable virtual calls are processed.
             Lacking "may" edges in the callgraph, we just preserve them post
             inlining.  */
          && (!DECL_VIRTUAL_P (node->decl)
              || !opt_for_fn (node->decl, flag_devirtualize))
          /* During early inlining some unanalyzed cgraph nodes might be in
             the callgraph and they might refer to the function in question.  */
          && !cgraph_new_nodes.exists ());
}

/* We are going to eliminate the last direct call to NODE (or an alias of it)
   via edge E.  Verify that NODE can be removed from the unit, and that, if it
   is contained in a comdat group, the whole comdat group is removable.  */

static bool
can_remove_node_now_p (struct cgraph_node *node, struct cgraph_edge *e)
{
  struct cgraph_node *next;
  if (!can_remove_node_now_p_1 (node, e))
    return false;

  /* When we see the same comdat group, we need to be sure that all
     items can be removed.  */
  if (!node->same_comdat_group || !node->externally_visible)
    return true;
  for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
       next != node; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
    {
      if (next->alias)
        continue;
      if ((next->callers && next->callers != e)
          || !can_remove_node_now_p_1 (next, e))
        return false;
    }
  return true;
}
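
/* Illustration of the comdat rule above (an assumed but typical scenario):
   for a C++ inline constructor, GCC may emit the complete-object and
   base-object variants in one comdat group.  After the last call to one
   variant is inlined, its offline body may only be discarded if the other
   variant has no remaining callers either; otherwise the whole group stays
   in the unit.  */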

/* Return true if NODE is a master clone with non-inline clones.  */

static bool
master_clone_with_noninline_clones_p (struct cgraph_node *node)
{
  if (node->clone_of)
    return false;

  for (struct cgraph_node *n = node->clones; n; n = n->next_sibling_clone)
    if (n->decl != node->decl)
      return true;

  return false;
}

/* E is expected to be an edge being inlined.  Clone the destination node of
   the edge and redirect it to the new clone.
   DUPLICATE is used for bookkeeping on whether we are actually creating new
   clones or re-using the node originally representing the out-of-line
   function call.
   By default the offline copy is removed when it appears dead after inlining.
   UPDATE_ORIGINAL prevents this transformation.
   If OVERALL_SIZE is non-NULL, the size is updated to reflect the
   transformation.
   FREQ_SCALE specifies the scaling of the frequencies of call sites.  */

void
clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
                     bool update_original, int *overall_size, int freq_scale)
{
  struct cgraph_node *inlining_into;
  struct cgraph_edge *next;

  if (e->caller->global.inlined_to)
    inlining_into = e->caller->global.inlined_to;
  else
    inlining_into = e->caller;

  if (duplicate)
    {
      /* We may eliminate the need for the out-of-line copy to be output.
         In that case just go ahead and re-use it.  This is not just a
         memory optimization.  Making the offline copy of the function
         disappear from the program will improve future decisions on
         inlining.  */
      if (!e->callee->callers->next_caller
          /* Recursive inlining never wants the master clone to
             be overwritten.  */
          && update_original
          && can_remove_node_now_p (e->callee, e)
          /* We cannot overwrite a master clone with non-inline clones
             until after these clones are materialized.  */
          && !master_clone_with_noninline_clones_p (e->callee))
        {
          /* TODO: When the callee is in a comdat group, we could remove all
             of it, including all inline clones inlined into it.  That would
             however need small function inlining to register an edge removal
             hook to maintain the priority queue.

             For now we keep the other functions in the group in the program
             until cgraph_remove_unreachable_functions gets rid of them.  */
          gcc_assert (!e->callee->global.inlined_to);
          e->callee->remove_from_same_comdat_group ();
          if (e->callee->definition
              && inline_account_function_p (e->callee))
            {
              gcc_assert (!e->callee->alias);
              if (overall_size)
                *overall_size -= inline_summaries->get (e->callee)->size;
              nfunctions_inlined++;
            }
          duplicate = false;
          e->callee->externally_visible = false;
          update_noncloned_frequencies (e->callee, e->frequency);

          dump_callgraph_transformation (e->callee, inlining_into,
                                         "inlining to");
        }
      else
        {
          struct cgraph_node *n;

          if (freq_scale == -1)
            freq_scale = e->frequency;
          n = e->callee->create_clone (e->callee->decl,
                                       MIN (e->count, e->callee->count),
                                       freq_scale,
                                       update_original, vNULL, true,
                                       inlining_into,
                                       NULL);
          n->used_as_abstract_origin = e->callee->used_as_abstract_origin;
          e->redirect_callee (n);
        }
    }
  else
    e->callee->remove_from_same_comdat_group ();

  e->callee->global.inlined_to = inlining_into;

  /* Recursively clone all bodies.  */
  for (e = e->callee->callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
        clone_inlined_nodes (e, duplicate, update_original, overall_size,
                             freq_scale);
    }
}
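
/* For example, inline_call below invokes

     clone_inlined_nodes (e, true, update_original, overall_size,
                          e->frequency);

   with DUPLICATE true: the callee's node is then either re-used directly
   (when this was the last call to an otherwise removable offline copy) or
   duplicated via create_clone, and the choice is propagated recursively to
   every edge already inlined into the callee.  */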

/* Check all speculations in N and resolve them if they seem useless.  */

static bool
check_speculations (cgraph_node *n)
{
  bool speculation_removed = false;
  cgraph_edge *next;

  for (cgraph_edge *e = n->callees; e; e = next)
    {
      next = e->next_callee;
      if (e->speculative && !speculation_useful_p (e, true))
        {
          e->resolve_speculation (NULL);
          speculation_removed = true;
        }
      else if (!e->inline_failed)
        speculation_removed |= check_speculations (e->callee);
    }
  return speculation_removed;
}
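
/* Sketch of the situation handled above (an assumed scenario): an indirect
   call that was earlier turned into a speculative direct edge, e.g. by
   devirtualization or profile-based indirect call promotion, may stop being
   worthwhile once inlining changes the size/frequency trade-off;
   resolve_speculation then turns it back into a plain indirect call.  The
   check recurses into already-inlined callees, since their speculative
   edges now belong to this function's body.  */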

/* Mark all call graph edges coming out of NODE and all nodes that have been
   inlined to it as in_polymorphic_cdtor.  */

static void
mark_all_inlined_calls_cdtor (cgraph_node *node)
{
  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      cs->in_polymorphic_cdtor = true;
      if (!cs->inline_failed)
        mark_all_inlined_calls_cdtor (cs->callee);
    }
  for (cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    cs->in_polymorphic_cdtor = true;
}


/* Mark edge E as inlined and update the callgraph accordingly.
   UPDATE_ORIGINAL specifies whether the profile of the original function
   should be updated.  If any new indirect edges are discovered in the
   process, add them to NEW_EDGES, unless it is NULL.  If
   UPDATE_OVERALL_SUMMARY is false, do not bother to recompute the overall
   size of the caller after inlining; the caller is then required to
   eventually do it via inline_update_overall_summary.
   If CALLEE_REMOVED is non-NULL, set it to true if we removed the callee
   node.

   Return true iff any new callgraph edges were discovered as a
   result of inlining.  */

bool
inline_call (struct cgraph_edge *e, bool update_original,
             vec<cgraph_edge *> *new_edges,
             int *overall_size, bool update_overall_summary,
             bool *callee_removed)
{
  int old_size = 0, new_size = 0;
  struct cgraph_node *to = NULL;
  struct cgraph_edge *curr = e;
  struct cgraph_node *callee = e->callee->ultimate_alias_target ();
  bool new_edges_found = false;

  int estimated_growth = 0;
  if (! update_overall_summary)
    estimated_growth = estimate_edge_growth (e);
  /* This is used only for the assert below.  */
#if 0
  bool predicated = inline_edge_summary (e)->predicate != NULL;
#endif

  /* Don't inline inlined edges.  */
  gcc_assert (e->inline_failed);
  /* Don't even think of inlining an inline clone.  */
  gcc_assert (!callee->global.inlined_to);

  to = e->caller;
  if (to->global.inlined_to)
    to = to->global.inlined_to;
  if (to->thunk.thunk_p)
    {
      struct cgraph_node *target = to->callees->callee;
      if (in_lto_p)
        to->get_untransformed_body ();
      to->expand_thunk (false, true);
      /* When the thunk is instrumented, we may have multiple callees.  */
      for (e = to->callees; e && e->callee != target; e = e->next_callee)
        ;
      gcc_assert (e);
    }


  e->inline_failed = CIF_OK;
  DECL_POSSIBLY_INLINED (callee->decl) = true;

  if (DECL_FUNCTION_PERSONALITY (callee->decl))
    DECL_FUNCTION_PERSONALITY (to->decl)
      = DECL_FUNCTION_PERSONALITY (callee->decl);

  bool reload_optimization_node = false;
  if (!opt_for_fn (callee->decl, flag_strict_aliasing)
      && opt_for_fn (to->decl, flag_strict_aliasing))
    {
      struct gcc_options opts = global_options;

      cl_optimization_restore (&opts, opts_for_fn (to->decl));
      opts.x_flag_strict_aliasing = false;
      if (dump_file)
        fprintf (dump_file, "Dropping flag_strict_aliasing on %s:%i\n",
                 to->name (), to->order);
      DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
         = build_optimization_node (&opts);
      reload_optimization_node = true;
    }

  inline_summary *caller_info = inline_summaries->get (to);
  inline_summary *callee_info = inline_summaries->get (callee);
  if (!caller_info->fp_expressions && callee_info->fp_expressions)
    {
      caller_info->fp_expressions = true;
      if (opt_for_fn (callee->decl, flag_rounding_math)
          != opt_for_fn (to->decl, flag_rounding_math)
          || opt_for_fn (callee->decl, flag_trapping_math)
             != opt_for_fn (to->decl, flag_trapping_math)
          || opt_for_fn (callee->decl, flag_unsafe_math_optimizations)
             != opt_for_fn (to->decl, flag_unsafe_math_optimizations)
          || opt_for_fn (callee->decl, flag_finite_math_only)
             != opt_for_fn (to->decl, flag_finite_math_only)
          || opt_for_fn (callee->decl, flag_signaling_nans)
             != opt_for_fn (to->decl, flag_signaling_nans)
          || opt_for_fn (callee->decl, flag_cx_limited_range)
             != opt_for_fn (to->decl, flag_cx_limited_range)
          || opt_for_fn (callee->decl, flag_signed_zeros)
             != opt_for_fn (to->decl, flag_signed_zeros)
          || opt_for_fn (callee->decl, flag_associative_math)
             != opt_for_fn (to->decl, flag_associative_math)
          || opt_for_fn (callee->decl, flag_reciprocal_math)
             != opt_for_fn (to->decl, flag_reciprocal_math)
          || opt_for_fn (callee->decl, flag_fp_int_builtin_inexact)
             != opt_for_fn (to->decl, flag_fp_int_builtin_inexact)
          || opt_for_fn (callee->decl, flag_errno_math)
             != opt_for_fn (to->decl, flag_errno_math))
        {
          struct gcc_options opts = global_options;

          cl_optimization_restore (&opts, opts_for_fn (to->decl));
          opts.x_flag_rounding_math
            = opt_for_fn (callee->decl, flag_rounding_math);
          opts.x_flag_trapping_math
            = opt_for_fn (callee->decl, flag_trapping_math);
          opts.x_flag_unsafe_math_optimizations
            = opt_for_fn (callee->decl, flag_unsafe_math_optimizations);
          opts.x_flag_finite_math_only
            = opt_for_fn (callee->decl, flag_finite_math_only);
          opts.x_flag_signaling_nans
            = opt_for_fn (callee->decl, flag_signaling_nans);
          opts.x_flag_cx_limited_range
            = opt_for_fn (callee->decl, flag_cx_limited_range);
          opts.x_flag_signed_zeros
            = opt_for_fn (callee->decl, flag_signed_zeros);
          opts.x_flag_associative_math
            = opt_for_fn (callee->decl, flag_associative_math);
          opts.x_flag_reciprocal_math
            = opt_for_fn (callee->decl, flag_reciprocal_math);
          opts.x_flag_fp_int_builtin_inexact
            = opt_for_fn (callee->decl, flag_fp_int_builtin_inexact);
          opts.x_flag_errno_math
            = opt_for_fn (callee->decl, flag_errno_math);
          if (dump_file)
            fprintf (dump_file, "Copying FP flags from %s:%i to %s:%i\n",
                     callee->name (), callee->order, to->name (), to->order);
          DECL_FUNCTION_SPECIFIC_OPTIMIZATION (to->decl)
             = build_optimization_node (&opts);
          reload_optimization_node = true;
        }
    }

  /* Reload global optimization flags.  */
  if (reload_optimization_node && DECL_STRUCT_FUNCTION (to->decl) == cfun)
    set_cfun (cfun, true);

  /* If aliases are involved, redirect the edge to the actual destination and
     possibly remove the aliases.  */
  if (e->callee != callee)
    {
      struct cgraph_node *alias = e->callee, *next_alias;
      e->redirect_callee (callee);
      while (alias && alias != callee)
        {
          if (!alias->callers
              && can_remove_node_now_p (alias,
                                        !e->next_caller && !e->prev_caller ? e : NULL))
            {
              next_alias = alias->get_alias_target ();
              alias->remove ();
              if (callee_removed)
                *callee_removed = true;
              alias = next_alias;
            }
          else
            break;
        }
    }

  clone_inlined_nodes (e, true, update_original, overall_size, e->frequency);

  gcc_assert (curr->callee->global.inlined_to == to);

  old_size = inline_summaries->get (to)->size;
  inline_merge_summary (e);
  if (e->in_polymorphic_cdtor)
    mark_all_inlined_calls_cdtor (e->callee);
  if (opt_for_fn (e->caller->decl, optimize))
    new_edges_found = ipa_propagate_indirect_call_infos (curr, new_edges);
  check_speculations (e->callee);
  if (update_overall_summary)
    inline_update_overall_summary (to);
  else
    /* Update the self size by the estimate, so that overall function growth
       limits work for further inlining into this function.  Before inlining
       into the function we inlined to again, we expect the caller to update
       its overall summary.  */
    inline_summaries->get (to)->size += estimated_growth;
  new_size = inline_summaries->get (to)->size;

  if (callee->calls_comdat_local)
    to->calls_comdat_local = true;
  else if (to->calls_comdat_local && callee->comdat_local_p ())
    {
      struct cgraph_edge *se = to->callees;
      for (; se; se = se->next_callee)
        if (se->inline_failed && se->callee->comdat_local_p ())
          break;
      if (se == NULL)
        to->calls_comdat_local = false;
    }

  /* FIXME: This assert suffers from roundoff errors; disable it for GCC 5
     and revisit it after the conversion to sreals in GCC 6.
     See PR 65654.  */
#if 0
  /* Verify that the estimated growth matches the real growth.  Allow an
     off-by-one error due to INLINE_SIZE_SCALE roundoff errors.  */
  gcc_assert (!update_overall_summary || !overall_size || new_edges_found
              || abs (estimated_growth - (new_size - old_size)) <= 1
              || speculation_removed
              /* FIXME: a hack.  Edges with a false predicate are accounted
                 wrong; we should remove them from the callgraph.  */
              || predicated);
#endif

  /* Account the change of the overall unit size; external functions will be
     removed and are thus not accounted for.  */
  if (overall_size && inline_account_function_p (to))
    *overall_size += new_size - old_size;
  ncalls_inlined++;

  /* This must happen after inline_merge_summary, which relies on the jump
     functions of the callee not being updated.  */
  return new_edges_found;
}
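
/* Worked example of the size accounting in inline_call, with illustrative
   numbers: if the caller's summary size goes from old_size == 40 to
   new_size == 55 after inline_merge_summary, and the caller is accounted
   (inline_account_function_p), then *overall_size grows by 15.  If the
   offline copy of the callee was re-used by clone_inlined_nodes, its own
   size was already subtracted from *overall_size there.  */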


/* Copy the function body of NODE and redirect all inline clones to it.
   This is done before the inline plan is applied to NODE when there are
   still some inline clones of it.

   This is necessary because inline decisions are not really transitive
   and the other inline clones may have different bodies.  */

static struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  if (dump_file)
    fprintf (dump_file, "\nSaving body of %s for later reuse\n",
             node->name ());

  gcc_assert (node == cgraph_node::get (node->decl));

  /* first_clone will be turned into a real function.  */
  first_clone = node->clones;

  /* Arrange for the first clone not to be a thunk, as thunks do not have
     bodies.  */
  if (first_clone->thunk.thunk_p)
    {
      while (first_clone->thunk.thunk_p)
        first_clone = first_clone->next_sibling_clone;
      first_clone->prev_sibling_clone->next_sibling_clone
        = first_clone->next_sibling_clone;
      if (first_clone->next_sibling_clone)
        first_clone->next_sibling_clone->prev_sibling_clone
           = first_clone->prev_sibling_clone;
      first_clone->next_sibling_clone = node->clones;
      first_clone->prev_sibling_clone = NULL;
      node->clones->prev_sibling_clone = first_clone;
      node->clones = first_clone;
    }
  first_clone->decl = copy_node (node->decl);
  first_clone->decl->decl_with_vis.symtab_node = first_clone;
  gcc_assert (first_clone == cgraph_node::get (first_clone->decl));

  /* Now reshape the clone tree, so that all other clones descend from
     first_clone.  */
  if (first_clone->next_sibling_clone)
    {
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone;
           n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  first_clone->clone_of = NULL;

  /* Now the node in question has no clones.  */
  node->clones = NULL;

  /* Inline clones share a decl with the function they are cloned
     from.  Walk the whole clone tree and redirect them all to the
     new decl.  */
  if (first_clone->clones)
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
        n->decl = first_clone->decl;
        if (n->clones)
          n = n->clones;
        else if (n->next_sibling_clone)
          n = n->next_sibling_clone;
        else
          {
            while (n != first_clone && !n->next_sibling_clone)
              n = n->clone_of;
            if (n != first_clone)
              n = n->next_sibling_clone;
          }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl,
                            NULL, true, NULL, false,
                            NULL, NULL);

  /* The function will be short-lived and removed after we inline all the
     clones, but make it internal so we won't confuse ourselves.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  first_clone->ipa_transforms_to_apply.release ();

  /* When doing recursive inlining, the clone may become unnecessary.
     This is possible e.g. in the case when the recursive function is proved
     to be non-throwing and the recursion happens only in the EH landing pad.
     We could not remove the clone until we were done with saving the body.
     Remove it now.  */
  if (!first_clone->callers)
    {
      first_clone->remove_symbol_and_inline_clones ();
      first_clone = NULL;
    }
  else if (flag_checking)
    first_clone->verify ();

  return first_clone;
}
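
/* Illustration of the reshaping done in save_inline_function_body, for an
   assumed clone tree with three inline clones c1, c2 and c3 of NODE:

     before:  node->clones = { c1, c2, c3 }, all sharing node->decl
     after:   node->clones = NULL; c1 gets a copy of the decl and the
              saved body, and c2 and c3 become clones of c1

   so later materialization of c2 and c3 copies from c1's preserved body
   rather than from NODE, whose body the inline plan is about to rewrite.  */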

/* Return true when the function body of NODE still needs to be kept around
   for later re-use.  */
static bool
preserve_function_body_p (struct cgraph_node *node)
{
  gcc_assert (symtab->global_info_ready);
  gcc_assert (!node->alias && !node->thunk.thunk_p);

  /* Look if there is any non-thunk clone around.  */
  for (node = node->clones; node; node = node->next_sibling_clone)
    if (!node->thunk.thunk_p)
      return true;
  return false;
}

/* Apply the inline plan to the function.  */

unsigned int
inline_transform (struct cgraph_node *node)
{
  unsigned int todo = 0;
  struct cgraph_edge *e, *next;
  bool has_inline = false;

  /* FIXME: Currently the pass manager is adding the inline transform more
     than once to some clones.  This needs revisiting after WPA cleanups.  */
  if (cfun->after_inlining)
    return 0;

  /* We might need the body of this function so that we can expand
     it inline somewhere else.  */
  if (preserve_function_body_p (node))
    save_inline_function_body (node);

  for (e = node->callees; e; e = next)
    {
      if (!e->inline_failed)
        has_inline = true;
      next = e->next_callee;
      e->redirect_call_stmt_to_callee ();
    }
  node->remove_all_references ();

  timevar_push (TV_INTEGRATION);
  if (node->callees && (opt_for_fn (node->decl, optimize) || has_inline))
    todo = optimize_inline_calls (current_function_decl);
  timevar_pop (TV_INTEGRATION);

  cfun->always_inline_functions_inlined = true;
  cfun->after_inlining = true;
  todo |= execute_fixup_cfg ();

  if (!(todo & TODO_update_ssa_any))
    /* Redirecting edges might lead to a need for vops to be recomputed.  */
    todo |= TODO_update_ssa_only_virtuals;

  return todo;
}
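
/* inline_transform is expected to run as the per-function transform stage
   of the IPA inlining pass (in ipa-inline.c it appears to be installed as
   the pass's function_transform hook; an assumption worth re-checking
   there).  The returned TODO flags tell the pass manager which cleanups to
   schedule, e.g. TODO_update_ssa_only_virtuals when call redirection may
   have invalidated virtual operands.  */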