xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cgraphunit.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* Callgraph based interprocedural optimizations.
2    Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3    Free Software Foundation, Inc.
4    Contributed by Jan Hubicka
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 /* This module implements main driver of compilation process as well as
23    few basic interprocedural optimizers.
24 
25    The main scope of this file is to act as an interface in between
26    tree based frontends and the backend (and middle end)
27 
28    The front-end is supposed to use following functionality:
29 
30     - cgraph_finalize_function
31 
32       This function is called once front-end has parsed whole body of function
33       and it is certain that neither the function body nor the declaration will change.
34 
35       (There is one exception needed for implementing GCC extern inline
36 	function.)
37 
38     - varpool_finalize_variable
39 
40       This function has same behavior as the above but is used for static
41       variables.
42 
43     - cgraph_finalize_compilation_unit
44 
45       This function is called once (source level) compilation unit is finalized
46       and it will no longer change.
47 
48       The call-graph construction and local function
49       analysis take place here.  Bodies of unreachable functions are released
50       to conserve memory usage.
51 
52       The function can be called multiple times when multiple source level
53       compilation units are combined (such as in C frontend)
54 
55     - cgraph_optimize
56 
57       In this unit-at-a-time compilation the intra procedural analysis takes
58       place here.  In particular the static functions whose address is never
59       taken are marked as local.  Backend can then use this information to
60       modify calling conventions, do better inlining or similar optimizations.
61 
62     - cgraph_mark_needed_node
63     - varpool_mark_needed_node
64 
65       When function or variable is referenced by some hidden way the call-graph
66       data structure must be updated accordingly by this function.
67       There should be little need to call this function and all the references
68       should be made explicit to cgraph code.  At present these functions are
69       used by C++ frontend to explicitly mark the keyed methods.
70 
71     - analyze_expr callback
72 
73       This function is responsible for lowering tree nodes not understood by
74       generic code into understandable ones or alternatively marking
75       callgraph and varpool nodes referenced by them as needed.
76 
77       ??? On the tree-ssa genericizing should take place here and we will avoid
78       need for these hooks (replacing them by genericizing hook)
79 
80         Analyzing of all functions is deferred
81 	to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
82 
83 	In cgraph_finalize_compilation_unit the reachable functions are
84 	analyzed.  During analysis the call-graph edges from reachable
85 	functions are constructed and their destinations are marked as
86 	reachable.  References to functions and variables are discovered too
87 	and variables found to be needed output to the assembly file.  Via
88 	mark_referenced call in assemble_variable functions referenced by
89 	static variables are noticed too.
90 
91 	The intra-procedural information is produced and its existence
92 	indicated by global_info_ready.  Once this flag is set it is impossible
93 	to change function from !reachable to reachable and thus
94 	assemble_variable no longer call mark_referenced.
95 
96 	Finally the call-graph is topologically sorted and all reachable functions
97 	that have not been completely inlined or are not external are output.
98 
99 	??? It is possible that reference to function or variable is optimized
100 	out.  We can not deal with this nicely because topological order is not
101 	suitable for it.  For tree-ssa we may consider another pass doing
102 	optimization and re-discovering reachable functions.
103 
104 	??? Reorganize code so variables are output very last and only if they
105 	really have been referenced by produced code, so we catch more cases
106 	where reference has been optimized out.  */
107 
108 
109 #include "config.h"
110 #include "system.h"
111 #include "coretypes.h"
112 #include "tm.h"
113 #include "tree.h"
114 #include "rtl.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
119 #include "toplev.h"
120 #include "flags.h"
121 #include "ggc.h"
122 #include "debug.h"
123 #include "target.h"
124 #include "cgraph.h"
125 #include "diagnostic.h"
126 #include "timevar.h"
127 #include "params.h"
128 #include "fibheap.h"
129 #include "intl.h"
130 #include "function.h"
131 #include "ipa-prop.h"
132 #include "gimple.h"
133 #include "tree-iterator.h"
134 #include "tree-pass.h"
135 #include "tree-dump.h"
136 #include "output.h"
137 #include "coverage.h"
138 #include "plugin.h"
139 
/* Forward declarations for the local driver routines defined later in
   this file.  */
static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static void cgraph_output_pending_asms (void);
static void cgraph_analyze_function (struct cgraph_node *);

/* Stream for call-graph dump output; may be NULL (every use below is
   guarded by a NULL check).  */
static FILE *cgraph_dump_file;

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static GTY (()) VEC(tree, gc) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static GTY (()) VEC(tree, gc) *static_dtors;

/* Used for vtable lookup in thunk adjusting.  */
static GTY (()) tree vtable_entry_type;
155 
156 /* When target does not have ctors and dtors, we call all constructor
157    and destructor by special initialization/destruction function
158    recognized by collect2.
159 
160    When we are going to build this function, collect all constructors and
161    destructors and turn them into normal functions.  */
162 
163 static void
164 record_cdtor_fn (tree fndecl)
165 {
166   struct cgraph_node *node;
167   if (targetm.have_ctors_dtors
168       || (!DECL_STATIC_CONSTRUCTOR (fndecl)
169 	  && !DECL_STATIC_DESTRUCTOR (fndecl)))
170     return;
171 
172   if (DECL_STATIC_CONSTRUCTOR (fndecl))
173     {
174       VEC_safe_push (tree, gc, static_ctors, fndecl);
175       DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
176     }
177   if (DECL_STATIC_DESTRUCTOR (fndecl))
178     {
179       VEC_safe_push (tree, gc, static_dtors, fndecl);
180       DECL_STATIC_DESTRUCTOR (fndecl) = 0;
181     }
182   node = cgraph_node (fndecl);
183   node->local.disregard_inline_limits = 1;
184   cgraph_mark_reachable_node (node);
185 }
186 
187 /* Define global constructors/destructor functions for the CDTORS, of
188    which they are LEN.  The CDTORS are sorted by initialization
189    priority.  If CTOR_P is true, these are constructors; otherwise,
190    they are destructors.  */
191 
192 static void
193 build_cdtor (bool ctor_p, tree *cdtors, size_t len)
194 {
195   size_t i;
196 
197   i = 0;
198   while (i < len)
199     {
200       tree body;
201       tree fn;
202       priority_type priority;
203 
204       priority = 0;
205       body = NULL_TREE;
206       /* Find the next batch of constructors/destructors with the same
207 	 initialization priority.  */
208       do
209 	{
210 	  priority_type p;
211 	  fn = cdtors[i];
212 	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
213 	  if (!body)
214 	    priority = p;
215 	  else if (p != priority)
216 	    break;
217 	  append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
218 							      fn, 0),
219 				    &body);
220 	  ++i;
221 	}
222       while (i < len);
223       gcc_assert (body != NULL_TREE);
224       /* Generate a function to call all the function of like
225 	 priority.  */
226       cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
227     }
228 }
229 
230 /* Comparison function for qsort.  P1 and P2 are actually of type
231    "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
232    used to determine the sort order.  */
233 
234 static int
235 compare_ctor (const void *p1, const void *p2)
236 {
237   tree f1;
238   tree f2;
239   int priority1;
240   int priority2;
241 
242   f1 = *(const tree *)p1;
243   f2 = *(const tree *)p2;
244   priority1 = DECL_INIT_PRIORITY (f1);
245   priority2 = DECL_INIT_PRIORITY (f2);
246 
247   if (priority1 < priority2)
248     return -1;
249   else if (priority1 > priority2)
250     return 1;
251   else
252     /* Ensure a stable sort.  */
253     return (const tree *)p1 - (const tree *)p2;
254 }
255 
256 /* Comparison function for qsort.  P1 and P2 are actually of type
257    "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
258    used to determine the sort order.  */
259 
260 static int
261 compare_dtor (const void *p1, const void *p2)
262 {
263   tree f1;
264   tree f2;
265   int priority1;
266   int priority2;
267 
268   f1 = *(const tree *)p1;
269   f2 = *(const tree *)p2;
270   priority1 = DECL_FINI_PRIORITY (f1);
271   priority2 = DECL_FINI_PRIORITY (f2);
272 
273   if (priority1 < priority2)
274     return -1;
275   else if (priority1 > priority2)
276     return 1;
277   else
278     /* Ensure a stable sort.  */
279     return (const tree *)p1 - (const tree *)p2;
280 }
281 
282 /* Generate functions to call static constructors and destructors
283    for targets that do not support .ctors/.dtors sections.  These
284    functions have magic names which are detected by collect2.  */
285 
286 static void
287 cgraph_build_cdtor_fns (void)
288 {
289   if (!VEC_empty (tree, static_ctors))
290     {
291       gcc_assert (!targetm.have_ctors_dtors);
292       qsort (VEC_address (tree, static_ctors),
293 	     VEC_length (tree, static_ctors),
294 	     sizeof (tree),
295 	     compare_ctor);
296       build_cdtor (/*ctor_p=*/true,
297 		   VEC_address (tree, static_ctors),
298 		   VEC_length (tree, static_ctors));
299       VEC_truncate (tree, static_ctors, 0);
300     }
301 
302   if (!VEC_empty (tree, static_dtors))
303     {
304       gcc_assert (!targetm.have_ctors_dtors);
305       qsort (VEC_address (tree, static_dtors),
306 	     VEC_length (tree, static_dtors),
307 	     sizeof (tree),
308 	     compare_dtor);
309       build_cdtor (/*ctor_p=*/false,
310 		   VEC_address (tree, static_dtors),
311 		   VEC_length (tree, static_dtors));
312       VEC_truncate (tree, static_dtors, 0);
313     }
314 }
315 
316 /* Determine if function DECL is needed.  That is, visible to something
317    either outside this translation unit, something magic in the system
318    configury.  */
319 
320 bool
321 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
322 {
323   /* If the user told us it is used, then it must be so.  */
324   if (node->local.externally_visible)
325     return true;
326 
327   /* ??? If the assembler name is set by hand, it is possible to assemble
328      the name later after finalizing the function and the fact is noticed
329      in assemble_name then.  This is arguably a bug.  */
330   if (DECL_ASSEMBLER_NAME_SET_P (decl)
331       && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
332     return true;
333 
334   /* With -fkeep-inline-functions we are keeping all inline functions except
335      for extern inline ones.  */
336   if (flag_keep_inline_functions
337       && DECL_DECLARED_INLINE_P (decl)
338       && !DECL_EXTERNAL (decl)
339       && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
340      return true;
341 
342   /* If we decided it was needed before, but at the time we didn't have
343      the body of the function available, then it's still needed.  We have
344      to go back and re-check its dependencies now.  */
345   if (node->needed)
346     return true;
347 
348   /* Externally visible functions must be output.  The exception is
349      COMDAT functions that must be output only when they are needed.
350 
351      When not optimizing, also output the static functions. (see
352      PR24561), but don't do so for always_inline functions, functions
353      declared inline and nested functions.  These was optimized out
354      in the original implementation and it is unclear whether we want
355      to change the behavior here.  */
356   if (((TREE_PUBLIC (decl)
357 	|| (!optimize && !node->local.disregard_inline_limits
358 	    && !DECL_DECLARED_INLINE_P (decl)
359 	    && !node->origin))
360        && !flag_whole_program
361        && !flag_lto
362        && !flag_whopr)
363       && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
364     return true;
365 
366   /* Constructors and destructors are reachable from the runtime by
367      some mechanism.  */
368   if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
369     return true;
370 
371   return false;
372 }
373 
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.
   Returns true iff any function was finalized while the graph was in the
   CONSTRUCTION state.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;

  /*  Note that this queue may grow as its being processed, as the new
      functions may generate new ones.  */
  while (cgraph_new_nodes)
    {
      /* Pop the head of the queue before processing it.  */
      node = cgraph_new_nodes;
      fndecl = node->decl;
      cgraph_new_nodes = cgraph_new_nodes->next_needed;
      /* What needs doing depends on how far overall compilation has
	 progressed.  */
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  node->next_needed = NULL;
	  cgraph_finalize_function (fndecl, false);
	  cgraph_mark_reachable_node (node);
	  output = true;
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  /* Enter the function's context for the passes below.  */
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  current_function_decl = fndecl;
	  compute_inline_parameters (node);
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  current_function_decl = NULL;
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  cgraph_expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      /* Notify whatever hooks were registered for function insertion.  */
      cgraph_call_function_insertion_hooks (node);
    }
  return output;
}
443 
444 /* As an GCC extension we allow redefinition of the function.  The
445    semantics when both copies of bodies differ is not well defined.
446    We replace the old body with new body so in unit at a time mode
447    we always use new body, while in normal mode we may end up with
448    old body inlined into some functions and new body expanded and
449    inlined in others.
450 
451    ??? It may make more sense to use one body for inlining and other
452    body for expanding the function but this is difficult to do.  */
453 
454 static void
455 cgraph_reset_node (struct cgraph_node *node)
456 {
457   /* If node->process is set, then we have already begun whole-unit analysis.
458      This is *not* testing for whether we've already emitted the function.
459      That case can be sort-of legitimately seen with real function redefinition
460      errors.  I would argue that the front end should never present us with
461      such a case, but don't enforce that for now.  */
462   gcc_assert (!node->process);
463 
464   /* Reset our data structures so we can analyze the function again.  */
465   memset (&node->local, 0, sizeof (node->local));
466   memset (&node->global, 0, sizeof (node->global));
467   memset (&node->rtl, 0, sizeof (node->rtl));
468   node->analyzed = false;
469   node->local.redefined_extern_inline = true;
470   node->local.finalized = false;
471 
472   cgraph_node_remove_callees (node);
473 
474   /* We may need to re-queue the node for assembling in case
475      we already proceeded it and ignored as not needed or got
476      a re-declaration in IMA mode.  */
477   if (node->reachable)
478     {
479       struct cgraph_node *n;
480 
481       for (n = cgraph_nodes_queue; n; n = n->next_needed)
482 	if (n == node)
483 	  break;
484       if (!n)
485 	node->reachable = 0;
486     }
487 }
488 
489 static void
490 cgraph_lower_function (struct cgraph_node *node)
491 {
492   if (node->lowered)
493     return;
494 
495   if (node->nested)
496     lower_nested_functions (node->decl);
497   gcc_assert (!node->nested);
498 
499   tree_lowering_passes (node->decl);
500   node->lowered = true;
501 }
502 
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_node (decl);

  /* A second finalization means DECL was redefined; drop everything
     recorded about the previous body first.  */
  if (node->local.finalized)
    cgraph_reset_node (node);

  node->pid = cgraph_max_pid ++;
  notice_global_symbol (decl);
  node->local.finalized = true;
  /* A function that already has a CFG was lowered by the front end.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  node->finalized_by_frontend = true;
  record_cdtor_fn (node->decl);

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  /* Since we reclaim unreachable nodes at the end of every language
     level unit, we need to be conservative about possible entry points
     there.  */
  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  /* Collecting is only safe when the caller did not forbid it.  */
  if (!nested)
    ggc_collect ();
}
543 
544 /* C99 extern inline keywords allow changing of declaration after function
545    has been finalized.  We need to re-decide if we want to mark the function as
546    needed then.   */
547 
548 void
549 cgraph_mark_if_needed (tree decl)
550 {
551   struct cgraph_node *node = cgraph_node (decl);
552   if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
553     cgraph_mark_needed_node (node);
554 }
555 
556 /* Verify cgraph nodes of given cgraph node.  */
557 void
558 verify_cgraph_node (struct cgraph_node *node)
559 {
560   struct cgraph_edge *e;
561   struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
562   struct function *saved_cfun = cfun;
563   basic_block this_block;
564   gimple_stmt_iterator gsi;
565   bool error_found = false;
566 
567   if (errorcount || sorrycount)
568     return;
569 
570   timevar_push (TV_CGRAPH_VERIFY);
571   /* debug_generic_stmt needs correct cfun */
572   set_cfun (this_cfun);
573   for (e = node->callees; e; e = e->next_callee)
574     if (e->aux)
575       {
576 	error ("aux field set for edge %s->%s",
577 	       identifier_to_locale (cgraph_node_name (e->caller)),
578 	       identifier_to_locale (cgraph_node_name (e->callee)));
579 	error_found = true;
580       }
581   if (node->count < 0)
582     {
583       error ("Execution count is negative");
584       error_found = true;
585     }
586   if (node->global.inlined_to && node->local.externally_visible)
587     {
588       error ("Externally visible inline clone");
589       error_found = true;
590     }
591   if (node->global.inlined_to && node->address_taken)
592     {
593       error ("Inline clone with address taken");
594       error_found = true;
595     }
596   if (node->global.inlined_to && node->needed)
597     {
598       error ("Inline clone is needed");
599       error_found = true;
600     }
601   for (e = node->callers; e; e = e->next_caller)
602     {
603       if (e->count < 0)
604 	{
605 	  error ("caller edge count is negative");
606 	  error_found = true;
607 	}
608       if (e->frequency < 0)
609 	{
610 	  error ("caller edge frequency is negative");
611 	  error_found = true;
612 	}
613       if (e->frequency > CGRAPH_FREQ_MAX)
614 	{
615 	  error ("caller edge frequency is too large");
616 	  error_found = true;
617 	}
618       if (gimple_has_body_p (e->caller->decl)
619           && !e->caller->global.inlined_to
620           && (e->frequency
621 	      != compute_call_stmt_bb_frequency (e->caller->decl,
622 						 gimple_bb (e->call_stmt))))
623 	{
624 	  error ("caller edge frequency %i does not match BB freqency %i",
625 	  	 e->frequency,
626 		 compute_call_stmt_bb_frequency (e->caller->decl,
627 						 gimple_bb (e->call_stmt)));
628 	  error_found = true;
629 	}
630       if (!e->inline_failed)
631 	{
632 	  if (node->global.inlined_to
633 	      != (e->caller->global.inlined_to
634 		  ? e->caller->global.inlined_to : e->caller))
635 	    {
636 	      error ("inlined_to pointer is wrong");
637 	      error_found = true;
638 	    }
639 	  if (node->callers->next_caller)
640 	    {
641 	      error ("multiple inline callers");
642 	      error_found = true;
643 	    }
644 	}
645       else
646 	if (node->global.inlined_to)
647 	  {
648 	    error ("inlined_to pointer set for noninline callers");
649 	    error_found = true;
650 	  }
651     }
652   if (!node->callers && node->global.inlined_to)
653     {
654       error ("inlined_to pointer is set but no predecessors found");
655       error_found = true;
656     }
657   if (node->global.inlined_to == node)
658     {
659       error ("inlined_to pointer refers to itself");
660       error_found = true;
661     }
662 
663   if (!cgraph_node (node->decl))
664     {
665       error ("node not found in cgraph_hash");
666       error_found = true;
667     }
668 
669   if (node->clone_of)
670     {
671       struct cgraph_node *n;
672       for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
673         if (n == node)
674 	  break;
675       if (!n)
676 	{
677 	  error ("node has wrong clone_of");
678 	  error_found = true;
679 	}
680     }
681   if (node->clones)
682     {
683       struct cgraph_node *n;
684       for (n = node->clones; n; n = n->next_sibling_clone)
685         if (n->clone_of != node)
686 	  break;
687       if (n)
688 	{
689 	  error ("node has wrong clone list");
690 	  error_found = true;
691 	}
692     }
693   if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
694     {
695        error ("node is in clone list but it is not clone");
696        error_found = true;
697     }
698   if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
699     {
700       error ("node has wrong prev_clone pointer");
701       error_found = true;
702     }
703   if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
704     {
705       error ("double linked list of clones corrupted");
706       error_found = true;
707     }
708   if (node->same_comdat_group)
709     {
710       struct cgraph_node *n = node->same_comdat_group;
711 
712       if (!DECL_ONE_ONLY (node->decl))
713 	{
714 	  error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
715 	  error_found = true;
716 	}
717       if (n == node)
718 	{
719 	  error ("node is alone in a comdat group");
720 	  error_found = true;
721 	}
722       do
723 	{
724 	  if (!n->same_comdat_group)
725 	    {
726 	      error ("same_comdat_group is not a circular list");
727 	      error_found = true;
728 	      break;
729 	    }
730 	  n = n->same_comdat_group;
731 	}
732       while (n != node);
733     }
734 
735   if (node->analyzed && gimple_has_body_p (node->decl)
736       && !TREE_ASM_WRITTEN (node->decl)
737       && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
738       && !flag_wpa)
739     {
740       if (this_cfun->cfg)
741 	{
742 	  /* The nodes we're interested in are never shared, so walk
743 	     the tree ignoring duplicates.  */
744 	  struct pointer_set_t *visited_nodes = pointer_set_create ();
745 	  /* Reach the trees by walking over the CFG, and note the
746 	     enclosing basic-blocks in the call edges.  */
747 	  FOR_EACH_BB_FN (this_block, this_cfun)
748 	    for (gsi = gsi_start_bb (this_block);
749                  !gsi_end_p (gsi);
750                  gsi_next (&gsi))
751 	      {
752 		gimple stmt = gsi_stmt (gsi);
753 		tree decl;
754 		if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
755 		  {
756 		    struct cgraph_edge *e = cgraph_edge (node, stmt);
757 		    if (e)
758 		      {
759 			if (e->aux)
760 			  {
761 			    error ("shared call_stmt:");
762 			    debug_gimple_stmt (stmt);
763 			    error_found = true;
764 			  }
765 			if (e->callee->same_body_alias)
766 			  {
767 			    error ("edge points to same body alias:");
768 			    debug_tree (e->callee->decl);
769 			    error_found = true;
770 			  }
771 			e->aux = (void *)1;
772 		      }
773 		    else
774 		      {
775 			error ("missing callgraph edge for call stmt:");
776 			debug_gimple_stmt (stmt);
777 			error_found = true;
778 		      }
779 		  }
780 	      }
781 	  pointer_set_destroy (visited_nodes);
782 	}
783       else
784 	/* No CFG available?!  */
785 	gcc_unreachable ();
786 
787       for (e = node->callees; e; e = e->next_callee)
788 	{
789 	  if (!e->aux && !e->indirect_call)
790 	    {
791 	      error ("edge %s->%s has no corresponding call_stmt",
792 		     identifier_to_locale (cgraph_node_name (e->caller)),
793 		     identifier_to_locale (cgraph_node_name (e->callee)));
794 	      debug_gimple_stmt (e->call_stmt);
795 	      error_found = true;
796 	    }
797 	  e->aux = 0;
798 	}
799     }
800   if (error_found)
801     {
802       dump_cgraph_node (stderr, node);
803       internal_error ("verify_cgraph_node failed");
804     }
805   set_cfun (saved_cfun);
806   timevar_pop (TV_CGRAPH_VERIFY);
807 }
808 
809 /* Verify whole cgraph structure.  */
810 void
811 verify_cgraph (void)
812 {
813   struct cgraph_node *node;
814 
815   if (sorrycount || errorcount)
816     return;
817 
818   for (node = cgraph_nodes; node; node = node->next)
819     verify_cgraph_node (node);
820 }
821 
822 /* Output all asm statements we have stored up to be output.  */
823 
824 static void
825 cgraph_output_pending_asms (void)
826 {
827   struct cgraph_asm_node *can;
828 
829   if (errorcount || sorrycount)
830     return;
831 
832   for (can = cgraph_asm_nodes; can; can = can->next)
833     assemble_asm (can->asm_str);
834   cgraph_asm_nodes = NULL;
835 }
836 
/* Analyze the function scheduled to be output: assign its assembler
   name, gimplify its body (at most once), run the lowering passes and
   mark NODE as analyzed.  Saves and restores cfun/current_function_decl
   around the work.  */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree save = current_function_decl;
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  /* (Sic: "neeeded" is the actual spelling of this API in this tree.)  */
  assign_assembler_name_if_neeeded (node->decl);

  /* Make sure to gimplify bodies only once.  During analyzing a
     function we lower it, which will require gimplified nested
     functions, so we can end up here with an already gimplified
     body.  */
  if (!gimple_body (decl))
    gimplify_function_tree (decl);
  dump_function (TDI_generic, decl);

  cgraph_lower_function (node);
  node->analyzed = true;

  /* Restore the caller's context.  */
  pop_cfun ();
  current_function_decl = save;
}
863 
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  Only nodes added since FIRST / FIRST_VAR are
   visited, so repeated calls process just the new entries.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
                                          struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  for (node = cgraph_nodes; node != first; node = node->next)
    {
      tree decl = node->decl;
      /* "used" attribute: keep the function even if nothing references
	 it.  */
      if (DECL_PRESERVE_P (decl))
	{
	  mark_decl_referenced (decl);
	  if (node->local.finalized)
	     cgraph_mark_needed_node (node);
	}
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  /* NOTE(review): "attribute have effect" below is ungrammatical,
	     but it is a translatable diagnostic string; changing it would
	     invalidate existing translations, so it is left as-is.  */
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	  else if (node->local.finalized)
	     cgraph_mark_needed_node (node);
	}
    }
  /* Same treatment for variables in the varpool.  */
  for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
    {
      tree decl = vnode->decl;
      if (DECL_PRESERVE_P (decl))
	{
	  mark_decl_referenced (decl);
	  vnode->force_output = true;
	  if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
      if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects")<
	  else if (vnode->finalized)
	    varpool_mark_needed_node (vnode);
	}
    }
}
935 
936 /* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
937    each reachable functions) and build cgraph.
938    The function can be called multiple times after inserting new nodes
939    into beginning of queue.  Just the new part of queue is re-scanned then.  */
940 
static void
cgraph_analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  static struct cgraph_node *first_analyzed;
  struct cgraph_node *first_processed = first_analyzed;
  static struct varpool_node *first_analyzed_var;
  struct cgraph_node *node, *next;

  /* Handle attributes on everything created since the previous call, then
     remember the current list heads so later rescans only visit new nodes.  */
  process_function_and_variable_attributes (first_processed,
					    first_analyzed_var);
  first_processed = cgraph_nodes;
  first_analyzed_var = varpool_nodes;
  varpool_analyze_pending_decls ();
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Initial entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n");
    }
  cgraph_process_new_functions ();

  /* Propagate reachability flag and lower representation of all reachable
     functions.  In the future, lowering will introduce new functions and
     new entry points on the way (by template instantiation and virtual
     method table generation for instance).  */
  while (cgraph_nodes_queue)
    {
      struct cgraph_edge *edge;
      tree decl = cgraph_nodes_queue->decl;

      /* Pop the head of the needed-nodes queue.  */
      node = cgraph_nodes_queue;
      cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
      node->next_needed = NULL;

      /* ??? It is possible to create extern inline function and later using
	 weak alias attribute to kill its body. See
	 gcc.c-torture/compile/20011119-1.c  */
      if (!DECL_STRUCT_FUNCTION (decl))
	{
	  cgraph_reset_node (node);
	  continue;
	}

      if (!node->analyzed)
	cgraph_analyze_function (node);

      /* Everything this node calls becomes reachable and gets queued.  */
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);

      /* Members of NODE's comdat group ring are output together, so they
	 all become reachable along with NODE.  */
      if (node->same_comdat_group)
	{
	  for (next = node->same_comdat_group;
	       next != node;
	       next = next->same_comdat_group)
	    cgraph_mark_reachable_node (next);
	}

      /* If decl is a clone of an abstract function, mark that abstract
	 function so that we don't release its body. The DECL_INITIAL() of that
         abstract function declaration will be later needed to output debug info.  */
      if (DECL_ABSTRACT_ORIGIN (decl))
	{
	  struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
	  origin_node->abstract_and_needed = true;
	}

      /* We finalize local static variables during constructing callgraph
         edges.  Process their attributes too.  */
      process_function_and_variable_attributes (first_processed,
						first_analyzed_var);
      first_processed = cgraph_nodes;
      first_analyzed_var = varpool_nodes;
      varpool_analyze_pending_decls ();
      cgraph_process_new_functions ();
    }

  /* Collect entry points to the unit.  */
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Unit entry points:");
      for (node = cgraph_nodes; node != first_analyzed; node = node->next)
	if (node->needed)
	  fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
      fprintf (cgraph_dump_file, "\n\nInitial ");
      dump_cgraph (cgraph_dump_file);
    }

  if (cgraph_dump_file)
    fprintf (cgraph_dump_file, "\nReclaiming functions:");

  /* Release nodes that were never marked reachable above.  */
  for (node = cgraph_nodes; node != first_analyzed; node = next)
    {
      tree decl = node->decl;
      /* Cache NEXT now: cgraph_remove_node below frees NODE.  */
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl))
	cgraph_reset_node (node);

      if (!node->reachable && gimple_has_body_p (decl))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
	  cgraph_remove_node (node);
	  continue;
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\n\nReclaimed ");
      dump_cgraph (cgraph_dump_file);
    }
  first_analyzed = cgraph_nodes;
  ggc_collect ();
}
1064 
1065 
1066 /* Analyze the whole compilation unit once it is parsed completely.  */
1067 
void
cgraph_finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Call functions declared with the "constructor" or "destructor"
     attribute.  */
  cgraph_build_cdtor_fns ();

  /* Mark alias targets necessary and emit diagnostics.  */
  finish_aliases_1 ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  (Called a second
     time, presumably to cover aliases introduced during the analysis
     above — TODO confirm.)  */
  finish_aliases_1 ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  cgraph_optimize ();

  timevar_pop (TV_CGRAPH);
}
1107 
1108 
1109 /* Figure out what functions we want to assemble.  */
1110 
static void
cgraph_mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No node may be pre-marked for output on entry.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->process);
#endif

  for (node = cgraph_nodes; node; node = node->next)
    {
      tree decl = node->decl;
      struct cgraph_edge *e;

      /* NODE may already be marked only if an earlier member of its comdat
	 group marked the whole ring.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* Look for at least one direct call that was not inlined; such a
	 call forces the body to be emitted.  */
      for (e = node->callers; e; e = e->next_caller)
	if (e->inline_failed)
	  break;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->global.inlined_to
	  && (node->needed
	      || (e && node->reachable))
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* A comdat group is emitted as a unit; mark the whole ring.  */
	  if (node->same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Remember to verify below that this node's whole group was
	     legitimately left out.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Verify that every comdat member left unmarked really had no body that
     needed output.  */
  if (check_same_comdat_groups)
    for (node = cgraph_nodes; node; node = node->next)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
	}
#endif
}
1196 
1197 /* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
1198    in lowered gimple form.
1199 
1200    Set current_function_decl and cfun to newly constructed empty function body.
1201    return basic block in the function body.  */
1202 
static basic_block
init_lowered_empty_function (tree decl)
{
  basic_block bb;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();
  init_tree_ssa (cfun);
  init_ssa_operands ();
  /* The body is going to be built directly in SSA form.  */
  cfun->gimple_df->in_ssa_p = true;
  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Mark the gimplification, EH lowering, CFG and SSA properties as
     already provided so later passes do not redo them.  */
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
     PROP_ssa);

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}
1229 
1230 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1231    offset indicated by VIRTUAL_OFFSET, if that is
1232    non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1233    zero for a result adjusting thunk.  */
1234 
static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a this-adjusting thunk the constant offset is applied before the
     vtable lookup; for a result-adjusting thunk it is applied after (see
     the !this_adjusting block below).  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign (ptr,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (ptr), ptr,
						   size_int (fixed_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;
      tree offsettmp;

      /* Lazily build the type used to access vtable entries.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_var (build_pointer_type
			(build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build1 (INDIRECT_REF,
					  TREE_TYPE (vtabletmp2), vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build2_loc (input_location,
						   POINTER_PLUS_EXPR,
						   TREE_TYPE (vtabletmp2),
						   vtabletmp2,
						   fold_convert (sizetype,
								 virtual_offset)));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build1 (INDIRECT_REF,
					  TREE_TYPE (vtabletmp3),
					  vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Cast to sizetype.  */
      offsettmp = create_tmp_var (sizetype, "offset");
      stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (stmt);
      find_referenced_vars_in (stmt);

      /* Adjust the `this' pointer.  */
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
			     offsettmp);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      /* Reuse PTR directly if it is already a VAR_DECL; otherwise copy it
	 into a temporary first.  */
      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	  mark_symbols_for_renaming (stmt);
	  find_referenced_vars_in (stmt);
	}
      ptr = fold_build2_loc (input_location,
			     POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
			     size_int (fixed_offset));
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  mark_symbols_for_renaming (stmt);
  find_referenced_vars_in (stmt);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1359 
1360 /* Produce assembler for thunk NODE.  */
1361 
static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  /* Fast path: let the target emit the this-adjusting thunk directly as
     assembly when it can.  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;

      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, integer_type_node);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
    }
  else
    {
      /* Slow path: build the thunk as a real GIMPLE function that calls
	 ALIAS with the adjusted pointer, and hand it to the compilation
	 queue.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      VEC(tree, heap) *vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  /* Aggregate results go straight into the result decl; register
	     results go through a temporary.  */
	  if (!is_gimple_reg_type (restype))
	    {
	      restmp = resdecl;
	      cfun->local_decls = tree_cons (NULL_TREE, restmp, cfun->local_decls);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
            restmp = create_tmp_var_raw (restype, "retval");
	}

      for (arg = a; arg; arg = TREE_CHAIN (arg))
        nargs++;
      vargs = VEC_alloc (tree, heap, nargs);
      /* The first argument (`this') may need adjusting before the call.  */
      if (this_adjusting)
        VEC_quick_push (tree, vargs,
			thunk_adjust (&bsi,
				      a, 1, fixed_offset,
				      virtual_offset));
      else
        VEC_quick_push (tree, vargs, a);
      for (i = 1, arg = TREE_CHAIN (a); i < nargs; i++, arg = TREE_CHAIN (arg))
        VEC_quick_push (tree, vargs, arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      VEC_free (tree, heap, vargs);
      gimple_call_set_cannot_inline (call, true);
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);
      mark_symbols_for_renaming (call);
      find_referenced_vars_in (call);
      update_stmt (call);

      /* For a result-adjusting thunk, adjust the returned value after the
	 call instead of the incoming pointer before it.  */
      if (restmp && !this_adjusting)
        {
	  tree true_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
	      				fold_convert (TREE_TYPE (restmp),
						      integer_zero_node),
	      			        NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
			         fixed_offset, virtual_offset);
	  /* NULL result bypasses the adjustment and yields NULL.  */
	  if (true_label)
	    {
	      gimple stmt;
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
								integer_zero_node));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
        gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      cgraph_remove_same_body_alias (node);
      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      mark_decl_referenced (thunk_fndecl);
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}
1538 
1539 /* Expand function specified by NODE.  */
1540 
static void
cgraph_expand_function (struct cgraph_node *node)
{
  tree decl = node->decl;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;

  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */
  tree_rest_of_compilation (decl);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  current_function_decl = NULL;
  if (node->same_body)
    {
      struct cgraph_node *alias, *next;
      bool saved_alias = node->alias;
      /* Find the tail of the same-body list; the walk below then follows
	 the ->previous links back toward the head.  */
      for (alias = node->same_body;
      	   alias && alias->next; alias = alias->next)
        ;
      /* Walk aliases in the order they were created; it is possible that
         thunks refer to the aliases made earlier.  */
      for (; alias; alias = next)
        {
	  next = alias->previous;
	  if (!alias->thunk.thunk_p)
	    assemble_alias (alias->decl,
			    DECL_ASSEMBLER_NAME (alias->thunk.alias));
	  else
	    assemble_thunk (alias);
	}
      node->alias = saved_alias;
    }
  gcc_assert (!cgraph_preserve_function_body_p (decl));
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);

  cgraph_function_flags_ready = true;
}
1588 
/* Return true when the call in edge E is to be inlined (no inline_failed
   reason is recorded); store the recorded reason into *REASON.  */
1590 
1591 bool
1592 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1593 {
1594   *reason = e->inline_failed;
1595   return !e->inline_failed;
1596 }
1597 
1598 
1599 
1600 /* Expand all functions that must be output.
1601 
1602    Attempt to topologically sort the nodes so function is output when
1603    all called functions are already assembled to allow data to be
1604    propagated across the callgraph.  Use a stack to get smaller distance
1605    between a function and its callees (later we may choose to use a more
1606    sophisticated algorithm for function reordering; we will likely want
1607    to use subsections to make the output functions appear in top-down
1608    order).  */
1609 
1610 static void
1611 cgraph_expand_all_functions (void)
1612 {
1613   struct cgraph_node *node;
1614   struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1615   int order_pos, new_order_pos = 0;
1616   int i;
1617 
1618   order_pos = cgraph_postorder (order);
1619   gcc_assert (order_pos == cgraph_n_nodes);
1620 
1621   /* Garbage collector may remove inline clones we eliminate during
1622      optimization.  So we must be sure to not reference them.  */
1623   for (i = 0; i < order_pos; i++)
1624     if (order[i]->process)
1625       order[new_order_pos++] = order[i];
1626 
1627   for (i = new_order_pos - 1; i >= 0; i--)
1628     {
1629       node = order[i];
1630       if (node->process)
1631 	{
1632 	  gcc_assert (node->reachable);
1633 	  node->process = 0;
1634 	  cgraph_expand_function (node);
1635 	}
1636     }
1637   cgraph_process_new_functions ();
1638 
1639   free (order);
1640 
1641 }
1642 
1643 /* This is used to sort the node types by the cgraph order number.  */
1644 
enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not occupied by any node.  */
  ORDER_FUNCTION,	/* Slot holds a cgraph (function) node in u.f.  */
  ORDER_VAR,		/* Slot holds a varpool (variable) node in u.v.  */
  ORDER_ASM		/* Slot holds a toplevel asm node in u.a.  */
};
1652 
struct cgraph_order_sort
{
  /* Discriminant selecting which union member below is valid.  */
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    struct cgraph_asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
1663 
1664 /* Output all functions, variables, and asm statements in the order
1665    according to their order fields, which is the order in which they
1666    appeared in the file.  This implements -fno-toplevel-reorder.  In
1667    this mode we may output functions and variables which don't really
1668    need to be output.  */
1669 
static void
cgraph_output_in_order (void)
{
  int max;
  struct cgraph_order_sort *nodes;
  int i;
  struct cgraph_node *pf;
  struct varpool_node *pv;
  struct cgraph_asm_node *pa;

  /* NODES is indexed by each entity's source-order number; every order
     number is used by at most one entity.  */
  max = cgraph_order;
  nodes = XCNEWVEC (struct cgraph_order_sort, max);

  varpool_analyze_pending_decls ();

  /* Slot in the functions marked for output.  */
  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  /* Slot in the queued variables.  */
  for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
    {
      i = pv->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_VAR;
      nodes[i].u.v = pv;
    }

  /* Slot in the toplevel asm statements.  */
  for (pa = cgraph_asm_nodes; pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In -fno-toplevel-reorder mode we output all statics; mark them as needed.  */
  for (i = 0; i < max; ++i)
    {
      if (nodes[i].kind == ORDER_VAR)
        {
	  varpool_mark_needed_node (nodes[i].u.v);
	}
    }
  varpool_empty_needed_queue ();

  /* Emit everything in the original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  cgraph_expand_function (nodes[i].u.f);
	  break;

	case ORDER_VAR:
	  varpool_assemble_decl (nodes[i].u.v);
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  cgraph_asm_nodes = NULL;
  free (nodes);
}
1750 
1751 /* Return true when function body of DECL still needs to be kept around
1752    for later re-use.  */
1753 bool
1754 cgraph_preserve_function_body_p (tree decl)
1755 {
1756   struct cgraph_node *node;
1757 
1758   gcc_assert (cgraph_global_info_ready);
1759   /* Look if there is any clone around.  */
1760   node = cgraph_node (decl);
1761   if (node->clones)
1762     return true;
1763   return false;
1764 }
1765 
static void
ipa_passes (void)
{
  /* IPA passes operate on the whole program, so leave function context.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  /* Small IPA passes run only when compiling from source, not when
     reading LTO bytecode back in.  */
  if (!in_lto_p)
    execute_ipa_pass_list (all_small_ipa_passes);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  /* Write out the LTO summaries when compiling from source.  */
  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* The regular IPA passes are skipped in the LTRANS stage (they already
     ran at WPA time).  */
  if (!flag_ltrans)
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
1816 
1817 
1818 /* Perform simple optimizations based on callgraph.  */
1819 
void
cgraph_optimize (void)
{
  /* Nothing to do when the front end already failed.  */
  if (errorcount || sorrycount)
    return;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Frontend may output common variables after the unit has been finalized.
     It is safe to deal with them here as they are always zero initialized.  */
  varpool_analyze_pending_decls ();

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (errorcount == 0 && sorrycount == 0)
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors.  */
  if (errorcount || sorrycount)
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass remove bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  cgraph_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_cgraph (cgraph_dump_file);
      dump_varpool (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  cgraph_materialize_all_clones ();
  cgraph_mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* -fno-toplevel-reorder forces source order; otherwise emit in
     callgraph (topological) order.  */
  if (!flag_toplevel_reorder)
    cgraph_output_in_order ();
  else
    {
      cgraph_output_pending_asms ();

      cgraph_expand_all_functions ();
      varpool_remove_unreferenced_decls ();

      varpool_assemble_pending_decls ();
    }
  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_cgraph (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_cgraph ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!(sorrycount || errorcount))
    {
      struct cgraph_node *node;
      bool error_found = false;

      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed
	    && (node->global.inlined_to
		|| gimple_has_body_p (node->decl)))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
1925 
1926 
1927 /* Generate and emit a static constructor or destructor.  WHICH must
1928    be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
1929    is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
1930    initialization priority for this constructor or destructor.  */
1931 
1932 void
1933 cgraph_build_static_cdtor (char which, tree body, int priority)
1934 {
1935   static int counter = 0;
1936   char which_buf[16];
1937   tree decl, name, resdecl;
1938 
1939   /* The priority is encoded in the constructor or destructor name.
1940      collect2 will sort the names and arrange that they are called at
1941      program startup.  */
1942   sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1943   name = get_file_function_name (which_buf);
1944 
1945   decl = build_decl (input_location, FUNCTION_DECL, name,
1946 		     build_function_type (void_type_node, void_list_node));
1947   current_function_decl = decl;
1948 
1949   resdecl = build_decl (input_location,
1950 			RESULT_DECL, NULL_TREE, void_type_node);
1951   DECL_ARTIFICIAL (resdecl) = 1;
1952   DECL_RESULT (decl) = resdecl;
1953   DECL_CONTEXT (resdecl) = decl;
1954 
1955   allocate_struct_function (decl, false);
1956 
1957   TREE_STATIC (decl) = 1;
1958   TREE_USED (decl) = 1;
1959   DECL_ARTIFICIAL (decl) = 1;
1960   DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1961   DECL_SAVED_TREE (decl) = body;
1962   if (!targetm.have_ctors_dtors)
1963     {
1964       TREE_PUBLIC (decl) = 1;
1965       DECL_PRESERVE_P (decl) = 1;
1966     }
1967   DECL_UNINLINABLE (decl) = 1;
1968 
1969   DECL_INITIAL (decl) = make_node (BLOCK);
1970   TREE_USED (DECL_INITIAL (decl)) = 1;
1971 
1972   DECL_SOURCE_LOCATION (decl) = input_location;
1973   cfun->function_end_locus = input_location;
1974 
1975   switch (which)
1976     {
1977     case 'I':
1978       DECL_STATIC_CONSTRUCTOR (decl) = 1;
1979       decl_init_priority_insert (decl, priority);
1980       break;
1981     case 'D':
1982       DECL_STATIC_DESTRUCTOR (decl) = 1;
1983       decl_fini_priority_insert (decl, priority);
1984       break;
1985     default:
1986       gcc_unreachable ();
1987     }
1988 
1989   gimplify_function_tree (decl);
1990 
1991   cgraph_add_new_function (decl, false);
1992   cgraph_mark_needed_node (cgraph_node (decl));
1993   set_cfun (NULL);
1994 }
1995 
/* Initialize the callgraph dump stream; cgraph_dump_file stays NULL
   when the cgraph dump was not requested.  */

void
init_cgraph (void)
{
  cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}
2001 
2002 /* The edges representing the callers of the NEW_VERSION node were
2003    fixed by cgraph_function_versioning (), now the call_expr in their
2004    respective tree code should be updated to call the NEW_VERSION.  */
2005 
2006 static void
2007 update_call_expr (struct cgraph_node *new_version)
2008 {
2009   struct cgraph_edge *e;
2010 
2011   gcc_assert (new_version);
2012 
2013   /* Update the call expr on the edges to call the new version.  */
2014   for (e = new_version->callers; e; e = e->next_caller)
2015     {
2016       struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
2017       gimple_call_set_fndecl (e->call_stmt, new_version->decl);
2018       maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
2019     }
2020 }
2021 
2022 
2023 /* Create a new cgraph node which is the new version of
2024    OLD_VERSION node.  REDIRECT_CALLERS holds the callers
2025    edges which should be redirected to point to
2026    NEW_VERSION.  ALL the callees edges of OLD_VERSION
2027    are cloned to the new version node.  Return the new
2028    version node.  */
2029 
2030 static struct cgraph_node *
2031 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
2032 				 tree new_decl,
2033 				 VEC(cgraph_edge_p,heap) *redirect_callers)
2034  {
2035    struct cgraph_node *new_version;
2036    struct cgraph_edge *e, *new_e;
2037    struct cgraph_edge *next_callee;
2038    unsigned i;
2039 
2040    gcc_assert (old_version);
2041 
2042    new_version = cgraph_node (new_decl);
2043 
2044    new_version->analyzed = true;
2045    new_version->local = old_version->local;
2046    new_version->global = old_version->global;
2047    new_version->rtl = new_version->rtl;
2048    new_version->reachable = true;
2049    new_version->count = old_version->count;
2050 
2051    /* Clone the old node callees.  Recursive calls are
2052       also cloned.  */
2053    for (e = old_version->callees;e; e=e->next_callee)
2054      {
2055        new_e = cgraph_clone_edge (e, new_version, e->call_stmt,
2056 				  e->lto_stmt_uid, 0, e->frequency,
2057 				  e->loop_nest, true);
2058        new_e->count = e->count;
2059      }
2060    /* Fix recursive calls.
2061       If OLD_VERSION has a recursive call after the
2062       previous edge cloning, the new version will have an edge
2063       pointing to the old version, which is wrong;
2064       Redirect it to point to the new version. */
2065    for (e = new_version->callees ; e; e = next_callee)
2066      {
2067        next_callee = e->next_callee;
2068        if (e->callee == old_version)
2069 	 cgraph_redirect_edge_callee (e, new_version);
2070 
2071        if (!next_callee)
2072 	 break;
2073      }
2074    for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
2075      {
2076        /* Redirect calls to the old version node to point to its new
2077 	  version.  */
2078        cgraph_redirect_edge_callee (e, new_version);
2079      }
2080 
2081    return new_version;
2082  }
2083 
2084  /* Perform function versioning.
2085     Function versioning includes copying of the tree and
2086     a callgraph update (creating a new cgraph node and updating
2087     its callees and callers).
2088 
2089     REDIRECT_CALLERS varray includes the edges to be redirected
2090     to the new version.
2091 
2092     TREE_MAP is a mapping of tree nodes we want to replace with
2093     new ones (according to results of prior analysis).
2094     OLD_VERSION_NODE is the node that is versioned.
2095     It returns the new version's cgraph node.
2096     ARGS_TO_SKIP lists arguments to be omitted from functions
2097     */
2098 
2099 struct cgraph_node *
2100 cgraph_function_versioning (struct cgraph_node *old_version_node,
2101 			    VEC(cgraph_edge_p,heap) *redirect_callers,
2102 			    VEC (ipa_replace_map_p,gc)* tree_map,
2103 			    bitmap args_to_skip)
2104 {
2105   tree old_decl = old_version_node->decl;
2106   struct cgraph_node *new_version_node = NULL;
2107   tree new_decl;
2108 
2109   if (!tree_versionable_function_p (old_decl))
2110     return NULL;
2111 
2112   /* Make a new FUNCTION_DECL tree node for the
2113      new version. */
2114   if (!args_to_skip)
2115     new_decl = copy_node (old_decl);
2116   else
2117     new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2118 
2119   /* Generate a new name for the new version. */
2120   DECL_NAME (new_decl) = clone_function_name (old_decl);
2121   SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2122   SET_DECL_RTL (new_decl, NULL);
2123 
2124   /* Create the new version's call-graph node.
2125      and update the edges of the new node. */
2126   new_version_node =
2127     cgraph_copy_node_for_versioning (old_version_node, new_decl,
2128 				     redirect_callers);
2129 
2130   /* Copy the OLD_VERSION_NODE function tree to the new version.  */
2131   tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);
2132 
2133   /* Update the new version's properties.
2134      Make The new version visible only within this translation unit.  Make sure
2135      that is not weak also.
2136      ??? We cannot use COMDAT linkage because there is no
2137      ABI support for this.  */
2138   cgraph_make_decl_local (new_version_node->decl);
2139   DECL_VIRTUAL_P (new_version_node->decl) = 0;
2140   new_version_node->local.externally_visible = 0;
2141   new_version_node->local.local = 1;
2142   new_version_node->lowered = true;
2143 
2144   /* Update the call_expr on the edges to call the new version node. */
2145   update_call_expr (new_version_node);
2146 
2147   cgraph_call_function_insertion_hooks (new_version_node);
2148   return new_version_node;
2149 }
2150 
/* Produce separate function body for inline clones so the offline copy can be
   modified without affecting them.  The first clone becomes the offline
   copy: it receives its own FUNCTION_DECL and a duplicated body, and all
   other clones of NODE are re-parented under it.  Returns the node now
   holding the offline copy.  */
struct cgraph_node *
save_inline_function_body (struct cgraph_node *node)
{
  struct cgraph_node *first_clone, *n;

  gcc_assert (node == cgraph_node (node->decl));

  cgraph_lower_function (node);

  first_clone = node->clones;

  /* Give FIRST_CLONE its own declaration and register it in the decl
     hashtable so cgraph_node () resolves to it.  */
  first_clone->decl = copy_node (node->decl);
  cgraph_insert_node_to_hashtable (first_clone);
  gcc_assert (first_clone == cgraph_node (first_clone->decl));
  if (first_clone->next_sibling_clone)
    {
      /* Re-parent NODE's remaining sibling clones under FIRST_CLONE and
	 splice them onto the front of FIRST_CLONE's own clone list.  */
      for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
        n->clone_of = first_clone;
      n->clone_of = first_clone;
      n->next_sibling_clone = first_clone->clones;
      if (first_clone->clones)
        first_clone->clones->prev_sibling_clone = n;
      first_clone->clones = first_clone->next_sibling_clone;
      first_clone->next_sibling_clone->prev_sibling_clone = NULL;
      first_clone->next_sibling_clone = NULL;
      gcc_assert (!first_clone->prev_sibling_clone);
    }
  /* FIRST_CLONE is now a root of its own clone tree, detached from NODE.  */
  first_clone->clone_of = NULL;
  node->clones = NULL;

  if (first_clone->clones)
    /* Walk the whole clone tree below FIRST_CLONE (children via
       `clones', siblings via `next_sibling_clone', climbing back up via
       `clone_of') and retarget every clone's decl to the new decl.  */
    for (n = first_clone->clones; n != first_clone;)
      {
        gcc_assert (n->decl == node->decl);
	n->decl = first_clone->decl;
	if (n->clones)
	  n = n->clones;
	else if (n->next_sibling_clone)
	  n = n->next_sibling_clone;
	else
	  {
	    /* No children and no further siblings: climb until an
	       unvisited sibling is found or the walk is done.  */
	    while (n != first_clone && !n->next_sibling_clone)
	      n = n->clone_of;
	    if (n != first_clone)
	      n = n->next_sibling_clone;
	  }
      }

  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);

  /* The offline copy is a private definition of this translation unit,
     not external nor COMDAT.  */
  DECL_EXTERNAL (first_clone->decl) = 0;
  DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
  TREE_PUBLIC (first_clone->decl) = 0;
  DECL_COMDAT (first_clone->decl) = 0;
  VEC_free (ipa_opt_pass, heap,
            first_clone->ipa_transforms_to_apply);
  first_clone->ipa_transforms_to_apply = NULL;

#ifdef ENABLE_CHECKING
  verify_cgraph_node (first_clone);
#endif
  return first_clone;
}
2217 
/* Given virtual clone, turn it into actual clone: duplicate the body of
   NODE's clone_of into NODE (applying the clone's tree_map replacements
   and skipping args_to_skip) and detach NODE from the clone tree.  */
static void
cgraph_materialize_clone (struct cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
  			    node->clone.tree_map, true,
			    node->clone.args_to_skip);
  if (cgraph_dump_file)
    {
      dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
      dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
    }

  /* Function is no longer clone.  Unlink NODE from the clone_of's
     doubly-linked sibling list.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  /* If the former origin has no body of its own and no remaining
     clones, its body and outgoing edges are dead weight now.  */
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      cgraph_release_function_body (node->clone_of);
      cgraph_node_remove_callees (node->clone_of);
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}
2250 
/* If necessary, change the function declaration in the call statement
   associated with E so that it corresponds to the edge callee.  Returns
   the (possibly newly built) call statement.  */

gimple
cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
{
  tree decl = gimple_call_fndecl (e->call_stmt);
  gimple new_stmt;

  /* Nothing to do for indirect calls (no fndecl), for calls already
     pointing at the callee's decl, or when the called decl resolves to
     the same cgraph node as the callee.  */
  if (!decl || decl == e->callee->decl
      /* Don't update call from same body alias to the real function.  */
      || cgraph_get_node (decl) == cgraph_get_node (e->callee->decl))
    return e->call_stmt;

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
	       cgraph_node_name (e->caller), e->caller->uid,
	       cgraph_node_name (e->callee), e->callee->uid);
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }

  if (e->callee->clone.combined_args_to_skip)
    {
      gimple_stmt_iterator gsi;

      /* The callee is a clone with some arguments removed: build a new
	 call statement without those arguments and replace the old one
	 in place.  */
      new_stmt
	= gimple_call_copy_skip_args (e->call_stmt,
				      e->callee->clone.combined_args_to_skip);

      /* Keep the virtual SSA web consistent: the copy must become the
	 defining statement of its VDEF.  */
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;

      gsi = gsi_for_stmt (e->call_stmt);
      gsi_replace (&gsi, new_stmt, true);
    }
  else
    new_stmt = e->call_stmt;

  gimple_call_set_fndecl (new_stmt, e->callee->decl);

  /* Record the statement replacement on this edge and on all clones of
     the caller that share the old statement.  */
  cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "  updated to:");
      print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
    }
  return new_stmt;
}
2302 
2303 /* Once all functions from compilation unit are in memory, produce all clones
2304    and update all calls.  We might also do this on demand if we don't want to
2305    bring all functions to memory prior compilation, but current WHOPR
2306    implementation does that and it is is bit easier to keep everything right in
2307    this order.  */
2308 void
2309 cgraph_materialize_all_clones (void)
2310 {
2311   struct cgraph_node *node;
2312   bool stabilized = false;
2313 
2314   if (cgraph_dump_file)
2315     fprintf (cgraph_dump_file, "Materializing clones\n");
2316 #ifdef ENABLE_CHECKING
2317   verify_cgraph ();
2318 #endif
2319 
2320   /* We can also do topological order, but number of iterations should be
2321      bounded by number of IPA passes since single IPA pass is probably not
2322      going to create clones of clones it created itself.  */
2323   while (!stabilized)
2324     {
2325       stabilized = true;
2326       for (node = cgraph_nodes; node; node = node->next)
2327         {
2328 	  if (node->clone_of && node->decl != node->clone_of->decl
2329 	      && !gimple_has_body_p (node->decl))
2330 	    {
2331 	      if (gimple_has_body_p (node->clone_of->decl))
2332 	        {
2333 		  if (cgraph_dump_file)
2334 		    {
2335 		      fprintf (cgraph_dump_file, "clonning %s to %s\n",
2336 			       cgraph_node_name (node->clone_of),
2337 			       cgraph_node_name (node));
2338 		      if (node->clone.tree_map)
2339 		        {
2340 			  unsigned int i;
2341 		          fprintf (cgraph_dump_file, "   replace map: ");
2342 			  for (i = 0; i < VEC_length (ipa_replace_map_p,
2343 			  			      node->clone.tree_map);
2344 						      i++)
2345 			    {
2346 			      struct ipa_replace_map *replace_info;
2347 			      replace_info = VEC_index (ipa_replace_map_p,
2348 			      				node->clone.tree_map,
2349 							i);
2350 			      print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2351 			      fprintf (cgraph_dump_file, " -> ");
2352 			      print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2353 			      fprintf (cgraph_dump_file, "%s%s;",
2354 			      	       replace_info->replace_p ? "(replace)":"",
2355 				       replace_info->ref_p ? "(ref)":"");
2356 			    }
2357 			  fprintf (cgraph_dump_file, "\n");
2358 			}
2359 		      if (node->clone.args_to_skip)
2360 			{
2361 		          fprintf (cgraph_dump_file, "   args_to_skip: ");
2362 		          dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
2363 			}
2364 		      if (node->clone.args_to_skip)
2365 			{
2366 		          fprintf (cgraph_dump_file, "   combined_args_to_skip:");
2367 		          dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2368 			}
2369 		    }
2370 		  cgraph_materialize_clone (node);
2371 	        }
2372 	      else
2373 		stabilized = false;
2374 	    }
2375 	}
2376     }
2377   for (node = cgraph_nodes; node; node = node->next)
2378     if (!node->analyzed && node->callees)
2379       cgraph_node_remove_callees (node);
2380   if (cgraph_dump_file)
2381     fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
2382 #ifdef ENABLE_CHECKING
2383   verify_cgraph ();
2384 #endif
2385   cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2386 }
2387 
2388 #include "gt-cgraphunit.h"
2389