xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cgraphunit.c (revision d909946ca08dceb44d7d0f22ec9488679695d976)
1 /* Driver of optimization process
2    Copyright (C) 2003-2013 Free Software Foundation, Inc.
3    Contributed by Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 /* This module implements main driver of compilation process.
22 
23    The main scope of this file is to act as an interface in between
24    tree based frontends and the backend.
25 
26    The front-end is supposed to use following functionality:
27 
28     - cgraph_finalize_function
29 
30       This function is called once front-end has parsed whole body of function
31       and it is certain that neither the function body nor the declaration will change.
32 
33       (There is one exception needed for implementing GCC extern inline
34 	function.)
35 
36     - varpool_finalize_decl
37 
38       This function has same behavior as the above but is used for static
39       variables.
40 
41     - add_asm_node
42 
43       Insert new toplevel ASM statement
44 
45     - finalize_compilation_unit
46 
47       This function is called once (source level) compilation unit is finalized
48       and it will no longer change.
49 
50       The symbol table is constructed starting from the trivially needed
51       symbols finalized by the frontend.  Functions are lowered into
52       GIMPLE representation and callgraph/reference lists are constructed.
53       Those are used to discover other necessary functions and variables.
54 
55       At the end the bodies of unreachable functions are removed.
56 
57       The function can be called multiple times when multiple source level
58       compilation units are combined.
59 
60     - compile
61 
62       This passes control to the back-end.  Optimizations are performed and
63       final assembler is generated.  This is done in the following way. Note
64       that with link time optimization the process is split into three
65       stages (compile time, linktime analysis and parallel linktime as
66       indicated below).
67 
68       Compile time:
69 
70 	1) Inter-procedural optimization.
71 	   (ipa_passes)
72 
73 	   This part is further split into:
74 
75 	   a) early optimizations. These are local passes executed in
76 	      the topological order on the callgraph.
77 
78 	      The purpose of early optimizations is to optimize away simple
79 	      things that may otherwise confuse IP analysis. Very simple
80 	      propagation across the callgraph is done i.e. to discover
81 	      functions without side effects and simple inlining is performed.
82 
83 	   b) early small interprocedural passes.
84 
85 	      Those are interprocedural passes executed only at compilation
86 	      time.  These include, for example, transactional memory lowering,
87 	      unreachable code removal and other simple transformations.
88 
89 	   c) IP analysis stage.  All interprocedural passes do their
90 	      analysis.
91 
92 	      Interprocedural passes differ from small interprocedural
93 	      passes by their ability to operate across whole program
94 	      at linktime.  Their analysis stage is performed early to
95 	      both reduce linking times and linktime memory usage by
96 	      not having to represent whole program in memory.
97 
98 	   d) LTO streaming.  When doing LTO, everything important gets
99 	      streamed into the object file.
100 
101        Compile time and or linktime analysis stage (WPA):
102 
103 	      At linktime units gets streamed back and symbol table is
104 	      merged.  Function bodies are not streamed in and not
105 	      available.
106 	   e) IP propagation stage.  All IP passes execute their
107 	      IP propagation. This is done based on the earlier analysis
108 	      without having function bodies at hand.
109 	   f) Ltrans streaming.  When doing WHOPR LTO, the program
110 	      is partitioned and streamed into multiple object files.
111 
112        Compile time and/or parallel linktime stage (ltrans)
113 
114 	      Each of the object files is streamed back and compiled
115 	      separately.  Now the function bodies becomes available
116 	      again.
117 
118 	 2) Virtual clone materialization
119 	    (cgraph_materialize_clone)
120 
121 	    IP passes can produce copies of existing functions (such
122 	    as versioned clones or inline clones) without actually
123 	    manipulating their bodies by creating virtual clones in
124 	    the callgraph. At this time the virtual clones are
125 	    turned into real functions
126 	 3) IP transformation
127 
128 	    All IP passes transform function bodies based on earlier
129 	    decision of the IP propagation.
130 
131 	 4) late small IP passes
132 
133 	    Simple IP passes working within single program partition.
134 
135 	 5) Expansion
136 	    (expand_all_functions)
137 
138 	    At this stage functions that needs to be output into
139 	    assembler are identified and compiled in topological order
140 	 6) Output of variables and aliases
141 	    Now it is known what variable references was not optimized
142 	    out and thus all variables are output to the file.
143 
144 	    Note that with -fno-toplevel-reorder passes 5 and 6
145 	    are combined together in cgraph_output_in_order.
146 
147    Finally there are functions to manipulate the callgraph from
148    backend.
149     - cgraph_add_new_function is used to add backend produced
150       functions introduced after the unit is finalized.
151       The functions are enqueued for later processing and inserted
152       into callgraph with cgraph_process_new_functions.
153 
154     - cgraph_function_versioning
155 
156       produces a copy of function into new one (a version)
157       and apply simple transformations
158 */
159 
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "tree.h"
165 #include "output.h"
166 #include "rtl.h"
167 #include "tree-flow.h"
168 #include "tree-inline.h"
169 #include "langhooks.h"
170 #include "pointer-set.h"
171 #include "toplev.h"
172 #include "flags.h"
173 #include "ggc.h"
174 #include "debug.h"
175 #include "target.h"
176 #include "cgraph.h"
177 #include "diagnostic.h"
178 #include "params.h"
179 #include "fibheap.h"
180 #include "intl.h"
181 #include "function.h"
182 #include "ipa-prop.h"
183 #include "gimple.h"
184 #include "tree-iterator.h"
185 #include "tree-pass.h"
186 #include "tree-dump.h"
187 #include "gimple-pretty-print.h"
188 #include "output.h"
189 #include "coverage.h"
190 #include "plugin.h"
191 #include "ipa-inline.h"
192 #include "ipa-utils.h"
193 #include "lto-streamer.h"
194 #include "except.h"
195 #include "regset.h"     /* FIXME: For reg_obstack.  */
196 
197 /* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
198    secondary queue used during optimization to accommodate passes that
199    may generate new functions that need to be optimized and expanded.  */
200 cgraph_node_set cgraph_new_nodes;
201 
202 static void expand_all_functions (void);
203 static void mark_functions_to_output (void);
204 static void expand_function (struct cgraph_node *);
205 static void cgraph_analyze_function (struct cgraph_node *);
206 static void handle_alias_pairs (void);
207 
208 FILE *cgraph_dump_file;
209 
210 /* Linked list of cgraph asm nodes.  */
211 struct asm_node *asm_nodes;
212 
213 /* Last node in cgraph_asm_nodes.  */
214 static GTY(()) struct asm_node *asm_last_node;
215 
216 /* Used for vtable lookup in thunk adjusting.  */
217 static GTY (()) tree vtable_entry_type;
218 
219 /* Determine if function DECL is trivially needed and should stay in the
220    compilation unit.  This is used at the symbol table construction time
221    and differs from later logic removing unnecessary functions that can
222    take into account results of analysis, whole program info etc.  */
223 
224 static bool
225 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
226 {
227   /* If the user told us it is used, then it must be so.  */
228   if (node->symbol.force_output)
229     return true;
230 
231   /* Double check that no one output the function into assembly file
232      early.  */
233   gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
234 	               || (node->thunk.thunk_p || node->same_body_alias)
235 	               ||  !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
236 
237 
238   /* Keep constructors, destructors and virtual functions.  */
239   if (DECL_STATIC_CONSTRUCTOR (decl)
240       || DECL_STATIC_DESTRUCTOR (decl)
241       || (DECL_VIRTUAL_P (decl)
242 	  && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
243      return true;
244 
245   /* Externally visible functions must be output.  The exception is
246      COMDAT functions that must be output only when they are needed.  */
247 
248   if (TREE_PUBLIC (decl)
249       && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
250     return true;
251 
252   return false;
253 }
254 
/* Head of the queue of nodes to be processed while building callgraph.  */

static symtab_node first = (symtab_node)(void *)1;

/* Add NODE to queue starting at FIRST.
   The queue is linked via AUX pointers and terminated by pointer to 1.  */

static void
enqueue_node (symtab_node node)
{
  /* The AUX pointer doubles as the "already queued" mark; a node with
     non-NULL AUX is in (or has been through) the queue.  */
  if (node->symbol.aux)
    return;
  /* FIRST is never NULL: even an empty queue is terminated by the
     (void *)1 sentinel, so a NULL here means the queue was corrupted.  */
  gcc_checking_assert (first);
  node->symbol.aux = first;
  first = node;
}
271 
/* Process CGRAPH_NEW_NODES and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.

   Returns true when some functions were finalized here (only possible in
   the CONSTRUCTION state), so the caller knows more output may have been
   scheduled.  */

bool
cgraph_process_new_functions (void)
{
  bool output = false;
  tree fndecl;
  struct cgraph_node *node;
  cgraph_node_set_iterator csi;

  if (!cgraph_new_nodes)
    return false;
  /* Resolve pending alias pairs before touching the new functions.
     NOTE(review): handle_alias_pairs is defined later in this file; its
     interaction with the new-node set is not visible here.  */
  handle_alias_pairs ();
  /*  Note that this queue may grow as its being processed, as the new
      functions may generate new ones.  */
  for (csi = csi_start (cgraph_new_nodes); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      fndecl = node->symbol.decl;
      /* How much catching-up a new function needs depends on how far the
	 overall compilation has progressed.  */
      switch (cgraph_state)
	{
	case CGRAPH_STATE_CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_finalize_function (fndecl, false);
	  output = true;
          cgraph_call_function_insertion_hooks (node);
	  enqueue_node ((symtab_node) node);
	  break;

	case CGRAPH_STATE_IPA:
	case CGRAPH_STATE_IPA_SSA:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    cgraph_analyze_function (node);
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  if ((cgraph_state == CGRAPH_STATE_IPA_SSA
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	      /* When not optimizing, be sure we run early local passes anyway
		 to expand OMP.  */
	      || !optimize)
	    execute_pass_list (pass_early_local_passes.pass.sub);
	  else
	    compute_inline_parameters (node, true);
	  /* Dominance info computed by the passes above is no longer
	     needed; discard it.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
          cgraph_call_function_insertion_hooks (node);
	  break;

	case CGRAPH_STATE_EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
          cgraph_call_function_insertion_hooks (node);
	  expand_function (node);
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }
  /* All queued nodes were handled; release the set so a fresh one gets
     allocated the next time functions are added.  */
  free_cgraph_node_set (cgraph_new_nodes);
  cgraph_new_nodes = NULL;
  return output;
}
346 
/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.  */

static void
cgraph_reset_node (struct cgraph_node *node)
{
  /* If node->process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!node->process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&node->local, 0, sizeof (node->local));
  memset (&node->global, 0, sizeof (node->global));
  memset (&node->rtl, 0, sizeof (node->rtl));
  node->analyzed = false;
  node->local.finalized = false;

  /* The old body's outgoing call edges are meaningless for the new body.  */
  cgraph_node_remove_callees (node);
}
376 
377 /* Return true when there are references to NODE.  */
378 
379 static bool
380 referred_to_p (symtab_node node)
381 {
382   struct ipa_ref *ref;
383 
384   /* See if there are any references at all.  */
385   if (ipa_ref_list_referring_iterate (&node->symbol.ref_list, 0, ref))
386     return true;
387   /* For functions check also calls.  */
388   cgraph_node *cn = dyn_cast <cgraph_node> (node);
389   if (cn && cn->callers)
390     return true;
391   return false;
392 }
393 
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NESTED is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_finalize_function (tree decl, bool nested)
{
  struct cgraph_node *node = cgraph_get_create_node (decl);

  /* A second definition of the same function: drop everything recorded
     for the first body (GCC extern inline extension).  */
  if (node->local.finalized)
    {
      cgraph_reset_node (node);
      node->local.redefined_extern_inline = true;
    }

  notice_global_symbol (decl);
  node->local.finalized = true;
  /* If the front end already built a CFG, the body counts as lowered.  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->symbol.force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if ((!optimize
       && !node->same_body_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->symbol.force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  /* Collecting is unsafe for NESTED callers (see the header comment).  */
  if (!nested)
    ggc_collect ();

  /* While the symbol table is being built, queue the node for analysis
     when it is trivially needed or already referenced.  */
  if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
      && (cgraph_decide_is_function_needed (node, decl)
	  || referred_to_p ((symtab_node)node)))
    enqueue_node ((symtab_node)node);
}
452 
/* Add the function FNDECL to the call graph.
   Unlike cgraph_finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.  LOWERED says whether the body has already been lowered.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */

void
cgraph_add_new_function (tree fndecl, bool lowered)
{
  struct cgraph_node *node;
  switch (cgraph_state)
    {
      case CGRAPH_STATE_PARSING:
	/* Still parsing: treat it like any front-end provided function.  */
	cgraph_finalize_function (fndecl, false);
	break;
      case CGRAPH_STATE_CONSTRUCTION:
	/* Just enqueue function to be processed at nearest occurrence.  */
	node = cgraph_create_node (fndecl);
	if (lowered)
	  node->lowered = true;
	if (!cgraph_new_nodes)
	  cgraph_new_nodes = cgraph_node_set_new ();
	cgraph_node_set_add (cgraph_new_nodes, node);
        break;

      case CGRAPH_STATE_IPA:
      case CGRAPH_STATE_IPA_SSA:
      case CGRAPH_STATE_EXPANSION:
	/* Bring the function into finalized state and enqueue for later
	   analyzing and compilation.  */
	node = cgraph_get_create_node (fndecl);
	node->local.local = false;
	node->local.finalized = true;
	node->symbol.force_output = true;
	/* During expansion no further lowering will happen globally, so
	   run the lowering and early local passes on this body now.  */
	if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
	  {
	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	    gimple_register_cfg_hooks ();
	    bitmap_obstack_initialize (NULL);
	    execute_pass_list (all_lowering_passes);
	    execute_pass_list (pass_early_local_passes.pass.sub);
	    bitmap_obstack_release (NULL);
	    pop_cfun ();

	    lowered = true;
	  }
	if (lowered)
	  node->lowered = true;
	if (!cgraph_new_nodes)
	  cgraph_new_nodes = cgraph_node_set_new ();
	cgraph_node_set_add (cgraph_new_nodes, node);
        break;

      case CGRAPH_STATE_FINISHED:
	/* At the very end of compilation we have to do all the work up
	   to expansion.  */
	node = cgraph_create_node (fndecl);
	if (lowered)
	  node->lowered = true;
	cgraph_analyze_function (node);
	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	gimple_register_cfg_hooks ();
	bitmap_obstack_initialize (NULL);
	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	  execute_pass_list (pass_early_local_passes.pass.sub);
	bitmap_obstack_release (NULL);
	pop_cfun ();
	expand_function (node);
	break;

      default:
	gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
539 
540 /* Add a top-level asm statement to the list.  */
541 
542 struct asm_node *
543 add_asm_node (tree asm_str)
544 {
545   struct asm_node *node;
546 
547   node = ggc_alloc_cleared_asm_node ();
548   node->asm_str = asm_str;
549   node->order = symtab_order++;
550   node->next = NULL;
551   if (asm_nodes == NULL)
552     asm_nodes = node;
553   else
554     asm_last_node->next = node;
555   asm_last_node = node;
556   return node;
557 }
558 
559 /* Output all asm statements we have stored up to be output.  */
560 
561 static void
562 output_asm_statements (void)
563 {
564   struct asm_node *can;
565 
566   if (seen_error ())
567     return;
568 
569   for (can = asm_nodes; can; can = can->next)
570     assemble_asm (can->asm_str);
571   asm_nodes = NULL;
572 }
573 
/* C++ FE sometimes change linkage flags after producing same body aliases.

   NODE is the alias symbol, TARGET the symbol it resolves to, and ALIAS
   the declaration whose flags are copied onto NODE's decl.  Flags that
   only matter for public symbols are copied only when NODE is public.  */
void
fixup_same_cpp_alias_visibility (symtab_node node, symtab_node target, tree alias)
{
  DECL_VIRTUAL_P (node->symbol.decl) = DECL_VIRTUAL_P (alias);
  if (TREE_PUBLIC (node->symbol.decl))
    {
      DECL_EXTERNAL (node->symbol.decl) = DECL_EXTERNAL (alias);
      DECL_COMDAT (node->symbol.decl) = DECL_COMDAT (alias);
      DECL_COMDAT_GROUP (node->symbol.decl) = DECL_COMDAT_GROUP (alias);
      /* A one-only alias must live in the same comdat group as its
	 target so both are kept or discarded together.  */
      if (DECL_ONE_ONLY (alias)
	  && !node->symbol.same_comdat_group)
	symtab_add_to_same_comdat_group ((symtab_node)node, (symtab_node)target);
    }
}
589 
/* Analyze the function scheduled to be output: record alias/thunk/
   dispatcher information, or gimplify and lower an ordinary body.
   Marks NODE analyzed on success.  */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
  tree decl = node->symbol.decl;
  location_t saved_loc = input_location;
  /* Diagnostics emitted below should point at the function itself.  */
  input_location = DECL_SOURCE_LOCATION (decl);

  if (node->alias && node->thunk.alias)
    {
      struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
      struct cgraph_node *n;

      /* Walk the alias chain from the target; reaching NODE again means
	 the aliases form a cycle that cannot be resolved.  */
      for (n = tgt; n && n->alias;
	   n = n->analyzed ? cgraph_alias_aliased_node (n) : NULL)
	if (n == node)
	  {
	    error ("function %q+D part of alias cycle", node->symbol.decl);
	    node->alias = false;
	    input_location = saved_loc;
	    return;
	  }
      /* Record the alias -> target IPA reference unless one exists.  */
      if (!vec_safe_length (node->symbol.ref_list.references))
        ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
			      IPA_REF_ALIAS, NULL);
      if (node->same_body_alias)
	{
	  /* Same-body aliases inherit the inline disposition of their
	     target.  */
	  DECL_DECLARED_INLINE_P (node->symbol.decl)
	     = DECL_DECLARED_INLINE_P (node->thunk.alias);
	  DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl)
	     = DECL_DISREGARD_INLINE_LIMITS (node->thunk.alias);
	  fixup_same_cpp_alias_visibility ((symtab_node) node, (symtab_node) tgt, node->thunk.alias);
	}

      /* Taking the alias's address is as good as taking the target's.  */
      if (node->symbol.address_taken)
	cgraph_mark_address_taken_node (cgraph_alias_aliased_node (node));
    }
  else if (node->thunk.thunk_p)
    {
      /* A thunk is represented as a call edge to the wrapped function.  */
      cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
			  NULL, 0, CGRAPH_FREQ_BASE);
    }
  else if (node->dispatcher_function)
    {
      /* Generate the dispatcher body of multi-versioned functions.  */
      struct cgraph_function_version_info *dispatcher_version_info
	= get_cgraph_node_version (node);
      if (dispatcher_version_info != NULL
          && (dispatcher_version_info->dispatcher_resolver
	      == NULL_TREE))
	{
	  tree resolver = NULL_TREE;
	  gcc_assert (targetm.generate_version_dispatcher_body);
	  resolver = targetm.generate_version_dispatcher_body (node);
	  gcc_assert (resolver != NULL_TREE);
	}
    }
  else
    {
      /* An ordinary function with a body.  */
      push_cfun (DECL_STRUCT_FUNCTION (decl));

      assign_assembler_name_if_neeeded (node->symbol.decl);

      /* Make sure to gimplify bodies only once.  During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body.  */
      if (!gimple_has_body_p (decl))
	gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

      /* Lower the function.  */
      if (!node->lowered)
	{
	  /* Nested functions must be flattened out of this one first.  */
	  if (node->nested)
	    lower_nested_functions (node->symbol.decl);
	  gcc_assert (!node->nested);

	  gimple_register_cfg_hooks ();
	  bitmap_obstack_initialize (NULL);
	  execute_pass_list (all_lowering_passes);
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  compact_blocks ();
	  bitmap_obstack_release (NULL);
	  node->lowered = true;
	}

      pop_cfun ();
    }
  node->analyzed = true;

  input_location = saved_loc;
}
684 
/* C++ frontend produces same body aliases all over the place, even before PCH
   gets streamed out.  It relies on us linking the aliases with their function
   in order to do the fixups, but ipa-ref is not PCH safe.  Consequently we
   first produce aliases without links, but once the C++ FE is sure it won't
   stream PCH we build the links via this function.  */

void
cgraph_process_same_body_aliases (void)
{
  struct cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    if (node->same_body_alias
	&& !vec_safe_length (node->symbol.ref_list.references))
      {
	/* The alias has no IPA references yet; record the link to its
	   target now.  */
        struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
        ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
			      IPA_REF_ALIAS, NULL);
      }
  same_body_aliases_done = true;
}
705 
706 /* Process attributes common for vars and functions.  */
707 
708 static void
709 process_common_attributes (tree decl)
710 {
711   tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
712 
713   if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
714     {
715       warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
716 		  "%<weakref%> attribute should be accompanied with"
717 		  " an %<alias%> attribute");
718       DECL_WEAK (decl) = 0;
719       DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
720 						 DECL_ATTRIBUTES (decl));
721     }
722 }
723 
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in cgraph_finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (struct cgraph_node *first,
                                          struct varpool_node *first_var)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  /* FIRST and FIRST_VAR mark where the previous invocation stopped, so
     repeated calls only visit newly added symbols.  */
  for (node = cgraph_first_function (); node != first;
       node = cgraph_next_function (node))
    {
      tree decl = node->symbol.decl;
      /* DECL_PRESERVE_P corresponds to attribute ((used)).  */
      if (DECL_PRESERVE_P (decl))
	cgraph_mark_force_output_node (node);
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->symbol.decl))
	    warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* A "weakref" on a function that already has a definition is
	 meaningless; drop it with a warning.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->local.finalized && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->symbol.decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (decl);
    }
  for (vnode = varpool_first_variable (); vnode != first_var;
       vnode = varpool_next_variable (vnode))
    {
      tree decl = vnode->symbol.decl;
      /* An external variable with a known constant initializer can be
	 finalized right away.  */
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl)
	  && const_value_known_p (decl))
	varpool_finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->symbol.force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->symbol.decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* Likewise, a "weakref" on an initialized variable is ignored.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->finalized
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->symbol.decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						      DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (decl);
    }
}
819 
/* Mark DECL as finalized.  By finalizing the declaration, frontend instruct the
   middle end to output the variable to asm file, if needed or externally
   visible.  */

void
varpool_finalize_decl (tree decl)
{
  struct varpool_node *node = varpool_node_for_decl (decl);

  /* Only static or external variables belong in the varpool.  */
  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Finalizing twice is harmless; nothing to update.  */
  if (node->finalized)
    return;
  notice_global_symbol (decl);
  node->finalized = true;
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || (!flag_toplevel_reorder && !DECL_COMDAT (node->symbol.decl)
	  && !DECL_ARTIFICIAL (node->symbol.decl)))
    node->symbol.force_output = true;

  /* While the symbol table is still being built, queue the variable for
     analysis when it is needed or already referenced.  */
  if (cgraph_state == CGRAPH_STATE_CONSTRUCTION
      && (decide_is_variable_needed (node, decl)
	  || referred_to_p ((symtab_node)node)))
    enqueue_node ((symtab_node)node);
  if (cgraph_state >= CGRAPH_STATE_IPA_SSA)
    varpool_analyze_node (node);
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (cgraph_state == CGRAPH_STATE_FINISHED)
    varpool_assemble_decl (node);
}
853 
854 
855 /* Determine if a symbol NODE is finalized and needed.  */
856 
857 inline static bool
858 symbol_finalized_and_needed (symtab_node node)
859 {
860   if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
861     return cnode->local.finalized
862 	   && cgraph_decide_is_function_needed (cnode, cnode->symbol.decl);
863   if (varpool_node *vnode = dyn_cast <varpool_node> (node))
864     return vnode->finalized
865 	   && !DECL_EXTERNAL (vnode->symbol.decl)
866 	   && decide_is_variable_needed (vnode, vnode->symbol.decl);
867   return false;
868 }
869 
870 /* Determine if a symbol NODE is finalized.  */
871 
872 inline static bool
873 symbol_finalized (symtab_node node)
874 {
875   if (cgraph_node *cnode= dyn_cast <cgraph_node> (node))
876     return cnode->local.finalized;
877   if (varpool_node *vnode = dyn_cast <varpool_node> (node))
878     return vnode->finalized;
879   return false;
880 }
881 
882 
/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred to by them.  */
885 
886 static void
887 cgraph_analyze_functions (void)
888 {
889   /* Keep track of already processed nodes when called multiple times for
890      intermodule optimization.  */
891   static struct cgraph_node *first_analyzed;
892   struct cgraph_node *first_handled = first_analyzed;
893   static struct varpool_node *first_analyzed_var;
894   struct varpool_node *first_handled_var = first_analyzed_var;
895 
896   symtab_node node, next;
897   int i;
898   struct ipa_ref *ref;
899   bool changed = true;
900 
901   bitmap_obstack_initialize (NULL);
902   cgraph_state = CGRAPH_STATE_CONSTRUCTION;
903 
904   /* Analysis adds static variables that in turn adds references to new functions.
905      So we need to iterate the process until it stabilize.  */
906   while (changed)
907     {
908       changed = false;
909       process_function_and_variable_attributes (first_analyzed,
910 						first_analyzed_var);
911 
912       /* First identify the trivially needed symbols.  */
913       for (node = symtab_nodes;
914 	   node != (symtab_node)first_analyzed
915 	   && node != (symtab_node)first_analyzed_var; node = node->symbol.next)
916 	{
917 	  if (symbol_finalized_and_needed (node))
918 	    {
919 	      enqueue_node (node);
920 	      if (!changed && cgraph_dump_file)
921 		fprintf (cgraph_dump_file, "Trivially needed symbols:");
922 	      changed = true;
923 	      if (cgraph_dump_file)
924 		fprintf (cgraph_dump_file, " %s", symtab_node_asm_name (node));
925 	    }
926 	  if (node == (symtab_node)first_analyzed
927 	      || node == (symtab_node)first_analyzed_var)
928 	    break;
929 	}
930       cgraph_process_new_functions ();
931       first_analyzed_var = varpool_first_variable ();
932       first_analyzed = cgraph_first_function ();
933 
934       if (changed && dump_file)
935 	fprintf (cgraph_dump_file, "\n");
936 
937       /* Lower representation, build callgraph edges and references for all trivially
938          needed symbols and all symbols referred by them.  */
939       while (first != (symtab_node)(void *)1)
940 	{
941 	  changed = true;
942 	  node = first;
943 	  first = (symtab_node)first->symbol.aux;
944 	  cgraph_node *cnode = dyn_cast <cgraph_node> (node);
945 	  if (cnode && cnode->local.finalized)
946 	    {
947 	      struct cgraph_edge *edge;
948 	      tree decl = cnode->symbol.decl;
949 
950 	      /* ??? It is possible to create extern inline function
951 	      and later using weak alias attribute to kill its body.
952 	      See gcc.c-torture/compile/20011119-1.c  */
953 	      if (!DECL_STRUCT_FUNCTION (decl)
954 		  && (!cnode->alias || !cnode->thunk.alias)
955 		  && !cnode->thunk.thunk_p
956 		  && !cnode->dispatcher_function)
957 		{
958 		  cgraph_reset_node (cnode);
959 		  cnode->local.redefined_extern_inline = true;
960 		  continue;
961 		}
962 
963 	      if (!cnode->analyzed)
964 		cgraph_analyze_function (cnode);
965 
966 	      for (edge = cnode->callees; edge; edge = edge->next_callee)
967 		if (edge->callee->local.finalized)
968 		   enqueue_node ((symtab_node)edge->callee);
969 
970 	      /* If decl is a clone of an abstract function,
971 	      mark that abstract function so that we don't release its body.
972 	      The DECL_INITIAL() of that abstract function declaration
973 	      will be later needed to output debug info.  */
974 	      if (DECL_ABSTRACT_ORIGIN (decl))
975 		{
976 		  struct cgraph_node *origin_node
977 	    	  = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
978 		  origin_node->abstract_and_needed = true;
979 		}
980 	    }
981 	  else
982 	    {
983 	      varpool_node *vnode = dyn_cast <varpool_node> (node);
984 	      if (vnode && vnode->finalized)
985 		varpool_analyze_node (vnode);
986 	    }
987 
988 	  if (node->symbol.same_comdat_group)
989 	    {
990 	      symtab_node next;
991 	      for (next = node->symbol.same_comdat_group;
992 		   next != node;
993 		   next = next->symbol.same_comdat_group)
994 		enqueue_node (next);
995 	    }
996 	  for (i = 0; ipa_ref_list_reference_iterate (&node->symbol.ref_list, i, ref); i++)
997 	    if (symbol_finalized (ref->referred))
998 	      enqueue_node (ref->referred);
999           cgraph_process_new_functions ();
1000 	}
1001     }
1002 
1003   /* Collect entry points to the unit.  */
1004   if (cgraph_dump_file)
1005     {
1006       fprintf (cgraph_dump_file, "\n\nInitial ");
1007       dump_symtab (cgraph_dump_file);
1008     }
1009 
1010   if (cgraph_dump_file)
1011     fprintf (cgraph_dump_file, "\nRemoving unused symbols:");
1012 
1013   for (node = symtab_nodes;
1014        node != (symtab_node)first_handled
1015        && node != (symtab_node)first_handled_var; node = next)
1016     {
1017       next = node->symbol.next;
1018       if (!node->symbol.aux && !referred_to_p (node))
1019 	{
1020 	  if (cgraph_dump_file)
1021 	    fprintf (cgraph_dump_file, " %s", symtab_node_name (node));
1022 	  symtab_remove_node (node);
1023 	  continue;
1024 	}
1025       if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
1026 	{
1027 	  tree decl = node->symbol.decl;
1028 
1029 	  if (cnode->local.finalized && !gimple_has_body_p (decl)
1030 	      && (!cnode->alias || !cnode->thunk.alias)
1031 	      && !cnode->thunk.thunk_p)
1032 	    cgraph_reset_node (cnode);
1033 
1034 	  gcc_assert (!cnode->local.finalized || cnode->thunk.thunk_p
1035 		      || cnode->alias
1036 		      || gimple_has_body_p (decl));
1037 	  gcc_assert (cnode->analyzed == cnode->local.finalized);
1038 	}
1039       node->symbol.aux = NULL;
1040     }
1041   first_analyzed = cgraph_first_function ();
1042   first_analyzed_var = varpool_first_variable ();
1043   if (cgraph_dump_file)
1044     {
1045       fprintf (cgraph_dump_file, "\n\nReclaimed ");
1046       dump_symtab (cgraph_dump_file);
1047     }
1048   bitmap_obstack_release (NULL);
1049   ggc_collect ();
1050 }
1051 
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  */
1055 
static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  /* Note: I is never incremented explicitly -- every path through the
     loop body removes element I with unordered_remove, which moves the
     last element into slot I, so re-iterating at the same index visits
     the next unprocessed pair.  */
  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node target_node = symtab_node_for_asm (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle; they
	 behave just as external variables except we need to note the alias flag
	 to later output the weakref pseudo op into asm file.  */
      if (!target_node && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  if (TREE_CODE (p->decl) == FUNCTION_DECL)
	    cgraph_get_create_node (p->decl)->alias = true;
	  else
	    varpool_get_node (p->decl)->alias = true;
	  DECL_EXTERNAL (p->decl) = 1;
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  /* Non-weakref alias to a symbol that does not exist at all.  */
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      /* Normally EXTERNAL flag is used to mark external inlines,
	 however for aliases it seems to be allowed to use it w/o
	 any meaning. See gcc.dg/attr-alias-3.c
	 However for weakref we insist on EXTERNAL flag being set.
	 See gcc.dg/attr-alias-5.c  */
      if (DECL_EXTERNAL (p->decl))
	DECL_EXTERNAL (p->decl)
	  = lookup_attribute ("weakref",
			      DECL_ATTRIBUTES (p->decl)) != NULL;

      /* A non-weakref alias to an external definition is an error.  */
      if (DECL_EXTERNAL (target_node->symbol.decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->symbol.decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->symbol.decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      /* Function aliased to function: record it in the callgraph.  */
      if (TREE_CODE (p->decl) == FUNCTION_DECL
          && target_node && is_a <cgraph_node> (target_node))
	{
	  struct cgraph_node *src_node = cgraph_get_node (p->decl);
	  /* Drop any body the alias decl may have accumulated; the alias
	     target provides the definition.  */
	  if (src_node && src_node->local.finalized)
            cgraph_reset_node (src_node);
	  cgraph_create_function_alias (p->decl, target_node->symbol.decl);
	  alias_pairs->unordered_remove (i);
	}
      /* Variable aliased to variable: record it in the varpool.  */
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && target_node && is_a <varpool_node> (target_node))
	{
	  varpool_create_variable_alias (p->decl, target_node->symbol.decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  /* Mixed function/variable aliasing is unsupported.  */
	  error ("%q+D alias in between function and variable is not supported",
		 p->decl);
	  warning (0, "%q+D aliased declaration",
		   target_node->symbol.decl);
	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
1134 
1135 
1136 /* Figure out what functions we want to assemble.  */
1137 
static void
mark_functions_to_output (void)
{
  struct cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No function may be marked for processing yet.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->symbol.decl;

      /* A node may only be pre-marked via its comdat group (below).  */
      gcc_assert (!node->process || node->symbol.same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Keep the rest of this node's comdat group together: mark
	     all non-thunk, non-alias members for output too.  */
	  if (node->symbol.same_comdat_group)
	    {
	      struct cgraph_node *next;
	      for (next = cgraph (node->symbol.same_comdat_group);
		   next != node;
		   next = cgraph (next->symbol.same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias)
		  next->process = 1;
	    }
	}
      else if (node->symbol.same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Verify later (after all marking) that skipping this group
	     member was legitimate.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->symbol.in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->symbol.in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  /* Second pass: any comdat group member left unmarked must not have a
     reclaimable body of its own.  */
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->symbol.same_comdat_group && !node->process)
	{
	  tree decl = node->symbol.decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->symbol.in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      dump_cgraph_node (stderr, node);
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1235 
1236 /* DECL is FUNCTION_DECL.  Initialize datastructures so DECL is a function
1237    in lowered gimple form.  IN_SSA is true if the gimple is in SSA.
1238 
1239    Set current_function_decl and cfun to newly constructed empty function body.
1240    return basic block in the function body.  */
1241 
basic_block
init_lowered_empty_function (tree decl, bool in_ssa)
{
  basic_block bb;

  /* Install DECL as the current function and give it an empty CFG.  */
  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();

  /* Optionally put the new body directly into SSA form.  */
  if (in_ssa)
    {
      init_tree_ssa (cfun);
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
    }

  DECL_INITIAL (decl) = make_node (BLOCK);

  /* Mark the decl as having a (placeholder) body.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  cfun->curr_properties |=
    (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_ssa | PROP_gimple_any);

  /* Create BB for body of the function and connect it properly.  */
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
  make_edge (ENTRY_BLOCK_PTR, bb, 0);
  make_edge (bb, EXIT_BLOCK_PTR, 0);

  return bb;
}
1272 
1273 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1274    offset indicated by VIRTUAL_OFFSET, if that is
1275    non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1276    zero for a result adjusting thunk.  */
1277 
static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gimple stmt;
  tree ret;

  /* For a this-adjusting thunk the constant offset is applied BEFORE the
     virtual lookup; for a result-adjusting thunk it is applied after.  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
						       ptr,
						       fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily build the global pointer-to-vtable-entry type.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				     "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				     "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1376 
1377 /* Produce assembler for thunk NODE.  */
1378 
static void
assemble_thunk (struct cgraph_node *node)
{
  bool this_adjusting = node->thunk.this_adjusting;
  HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
  HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
  tree virtual_offset = NULL;
  tree alias = node->thunk.alias;
  tree thunk_fndecl = node->symbol.decl;
  tree a = DECL_ARGUMENTS (thunk_fndecl);

  current_function_decl = thunk_fndecl;

  /* Ensure thunks are emitted in their correct sections.  */
  resolve_unique_section (thunk_fndecl, 0, flag_function_sections);

  /* Fast path: the target can emit the this-adjusting thunk directly
     as assembly, bypassing GIMPLE/RTL entirely.  */
  if (this_adjusting
      && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
					      virtual_value, alias))
    {
      const char *fnname;
      tree fn_block;
      tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      
      DECL_RESULT (thunk_fndecl)
	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
		      RESULT_DECL, 0, restype);
      fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));

      /* The back end expects DECL_INITIAL to contain a BLOCK, so we
	 create one.  */
      fn_block = make_node (BLOCK);
      BLOCK_VARS (fn_block) = a;
      DECL_INITIAL (thunk_fndecl) = fn_block;
      init_function_start (thunk_fndecl);
      cfun->is_thunk = 1;
      insn_locations_init ();
      set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
      prologue_location = curr_insn_location ();
      assemble_start_function (thunk_fndecl, fnname);

      targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
				       fixed_offset, virtual_value, alias);

      assemble_end_function (thunk_fndecl, fnname);
      insn_locations_finalize ();
      init_insn_lengths ();
      free_after_compilation (cfun);
      set_cfun (NULL);
      /* The thunk is fully written out; it is no longer a thunk node.  */
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      node->thunk.thunk_p = false;
      node->analyzed = false;
    }
  else
    {
      /* Slow path: build a real GIMPLE body that adjusts the pointer(s)
	 and calls the thunked-to function, then hand it to the normal
	 compilation pipeline via cgraph_add_new_function.  */
      tree restype;
      basic_block bb, then_bb, else_bb, return_bb;
      gimple_stmt_iterator bsi;
      int nargs = 0;
      tree arg;
      int i;
      tree resdecl;
      tree restmp = NULL;
      vec<tree> vargs;

      gimple call;
      gimple ret;

      DECL_IGNORED_P (thunk_fndecl) = 1;
      bitmap_obstack_initialize (NULL);

      if (node->thunk.virtual_offset_p)
        virtual_offset = size_int (virtual_value);

      /* Build the return declaration for the function.  */
      restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
      if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
	{
	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
	  DECL_ARTIFICIAL (resdecl) = 1;
	  DECL_IGNORED_P (resdecl) = 1;
	  DECL_RESULT (thunk_fndecl) = resdecl;
	}
      else
	resdecl = DECL_RESULT (thunk_fndecl);

      bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl, true);

      bsi = gsi_start_bb (bb);

      /* Build call to the function being thunked.  */
      if (!VOID_TYPE_P (restype))
	{
	  if (!is_gimple_reg_type (restype))
	    {
	      /* Aggregate result: reuse the RESULT_DECL itself.  */
	      restmp = resdecl;
	      add_local_decl (cfun, restmp);
	      BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
	    }
	  else
	    restmp = create_tmp_reg (restype, "retval");
	}

      for (arg = a; arg; arg = DECL_CHAIN (arg))
        nargs++;
      vargs.create (nargs);
      /* The first argument (`this') may need adjusting before the call.  */
      if (this_adjusting)
        vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
					virtual_offset));
      else
        vargs.quick_push (a);
      for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
	vargs.quick_push (arg);
      call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
      vargs.release ();
      gimple_call_set_from_thunk (call, true);
      if (restmp)
        gimple_call_set_lhs (call, restmp);
      gsi_insert_after (&bsi, call, GSI_NEW_STMT);

      /* A result-adjusting thunk must fix up the returned value after
	 the call.  */
      if (restmp && !this_adjusting)
        {
	  tree true_label = NULL_TREE;

	  if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
	    {
	      gimple stmt;
	      /* If the return type is a pointer, we need to
		 protect against NULL.  We know there will be an
		 adjustment, because that's why we're emitting a
		 thunk.  */
	      then_bb = create_basic_block (NULL, (void *) 0, bb);
	      return_bb = create_basic_block (NULL, (void *) 0, then_bb);
	      else_bb = create_basic_block (NULL, (void *) 0, else_bb);
	      remove_edge (single_succ_edge (bb));
	      true_label = gimple_block_label (then_bb);
	      stmt = gimple_build_cond (NE_EXPR, restmp,
	      				build_zero_cst (TREE_TYPE (restmp)),
	      			        NULL_TREE, NULL_TREE);
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      make_edge (bb, then_bb, EDGE_TRUE_VALUE);
	      make_edge (bb, else_bb, EDGE_FALSE_VALUE);
	      make_edge (return_bb, EXIT_BLOCK_PTR, 0);
	      make_edge (then_bb, return_bb, EDGE_FALLTHRU);
	      make_edge (else_bb, return_bb, EDGE_FALLTHRU);
	      bsi = gsi_last_bb (then_bb);
	    }

	  restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
			         fixed_offset, virtual_offset);
	  if (true_label)
	    {
	      gimple stmt;
	      /* NULL result stays NULL: assign zero on the else path.  */
	      bsi = gsi_last_bb (else_bb);
	      stmt = gimple_build_assign (restmp,
					  build_zero_cst (TREE_TYPE (restmp)));
	      gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
	      bsi = gsi_last_bb (return_bb);
	    }
	}
      else
        gimple_call_set_tail (call, true);

      /* Build return value.  */
      ret = gimple_build_return (restmp);
      gsi_insert_after (&bsi, ret, GSI_NEW_STMT);

      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced.  */
      node->thunk.thunk_p = false;
      cgraph_node_remove_callees (node);
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
  set_cfun (NULL);
}
1559 
1560 
1561 
1562 /* Assemble thunks and aliases associated to NODE.  */
1563 
static void
assemble_thunks_and_aliases (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  int i;
  struct ipa_ref *ref;

  /* Emit every thunk whose target is NODE (and, recursively, thunks of
     those thunks).  E is advanced before assemble_thunk is called
     because assembling may modify the caller list.  */
  for (e = node->callers; e;)
    if (e->caller->thunk.thunk_p)
      {
	struct cgraph_node *thunk = e->caller;

	e = e->next_caller;
	assemble_thunks_and_aliases (thunk);
        assemble_thunk (thunk);
      }
    else
      e = e->next_caller;
  /* Emit every alias referring to NODE, recursing for aliases of
     aliases.  */
  for (i = 0; ipa_ref_list_referring_iterate (&node->symbol.ref_list,
					     i, ref); i++)
    if (ref->use == IPA_REF_ALIAS)
      {
	struct cgraph_node *alias = ipa_ref_referring_node (ref);
        bool saved_written = TREE_ASM_WRITTEN (alias->thunk.alias);

	/* Force assemble_alias to really output the alias this time instead
	   of buffering it in same alias pairs.  */
	TREE_ASM_WRITTEN (alias->thunk.alias) = 1;
	do_assemble_alias (alias->symbol.decl,
			   DECL_ASSEMBLER_NAME (alias->thunk.alias));
	assemble_thunks_and_aliases (alias);
	TREE_ASM_WRITTEN (alias->thunk.alias) = saved_written;
      }
}
1598 
1599 /* Expand function specified by NODE.  */
1600 
static void
expand_function (struct cgraph_node *node)
{
  tree decl = node->symbol.decl;
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!node->global.inlined_to);

  announce_function (decl);
  node->process = 0;
  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (cgraph_global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = decl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply pending IPA transform summaries to this function first.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* If the size fits in an unsigned int, report it exactly;
	     otherwise report only that it exceeds the threshold.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  /* The GIMPLE body is no longer needed once RTL has been emitted.  */
  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_get_node (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  set_cfun (NULL);
  current_function_decl = NULL;

  /* It would make a lot more sense to output thunks before function body to get more
     forward and lest backwarding jumps.  This however would need solving problem
     with comdats. See PR48668.  Also aliases must come after function itself to
     make one pass assemblers, like one on AIX, happy.  See PR 50689.
     FIXME: Perhaps thunks should be move before function IFF they are not in comdat
     groups.  */
  assemble_thunks_and_aliases (node);
  cgraph_release_function_body (node);
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  cgraph_node_remove_callees (node);
}
1709 
1710 
1711 /* Expand all functions that must be output.
1712 
1713    Attempt to topologically sort the nodes so function is output when
1714    all called functions are already assembled to allow data to be
1715    propagated across the callgraph.  Use a stack to get smaller distance
1716    between a function and its callees (later we may choose to use a more
1717    sophisticated algorithm for function reordering; we will likely want
1718    to use subsections to make the output functions appear in top-down
1719    order).  */
1720 
1721 static void
1722 expand_all_functions (void)
1723 {
1724   struct cgraph_node *node;
1725   struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1726   int order_pos, new_order_pos = 0;
1727   int i;
1728 
1729   order_pos = ipa_reverse_postorder (order);
1730   gcc_assert (order_pos == cgraph_n_nodes);
1731 
1732   /* Garbage collector may remove inline clones we eliminate during
1733      optimization.  So we must be sure to not reference them.  */
1734   for (i = 0; i < order_pos; i++)
1735     if (order[i]->process)
1736       order[new_order_pos++] = order[i];
1737 
1738   for (i = new_order_pos - 1; i >= 0; i--)
1739     {
1740       node = order[i];
1741       if (node->process)
1742 	{
1743 	  node->process = 0;
1744 	  expand_function (node);
1745 	}
1746     }
1747   cgraph_process_new_functions ();
1748 
1749   free (order);
1750 
1751 }
1752 
1753 /* This is used to sort the node types by the cgraph order number.  */
1754 
enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,		/* Slot unused by any symbol.  */
  ORDER_FUNCTION,		/* Slot holds a function (cgraph) node.  */
  ORDER_VAR,			/* Slot holds a variable (varpool) node.  */
  ORDER_ASM			/* Slot holds a toplevel asm statement.  */
};
1762 
/* Tagged union pairing a symbol with its kind; KIND selects which
   member of U is valid.  */
struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    struct cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    struct varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    struct asm_node *a;		/* Valid when kind == ORDER_ASM.  */
  } u;
};
1773 
1774 /* Output all functions, variables, and asm statements in the order
1775    according to their order fields, which is the order in which they
1776    appeared in the file.  This implements -fno-toplevel-reorder.  In
1777    this mode we may output functions and variables which don't really
1778    need to be output.  */
1779 
1780 static void
1781 output_in_order (void)
1782 {
1783   int max;
1784   struct cgraph_order_sort *nodes;
1785   int i;
1786   struct cgraph_node *pf;
1787   struct varpool_node *pv;
1788   struct asm_node *pa;
1789 
1790   max = symtab_order;
1791   nodes = XCNEWVEC (struct cgraph_order_sort, max);
1792 
1793   FOR_EACH_DEFINED_FUNCTION (pf)
1794     {
1795       if (pf->process && !pf->thunk.thunk_p && !pf->alias)
1796 	{
1797 	  i = pf->symbol.order;
1798 	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1799 	  nodes[i].kind = ORDER_FUNCTION;
1800 	  nodes[i].u.f = pf;
1801 	}
1802     }
1803 
1804   FOR_EACH_DEFINED_VARIABLE (pv)
1805     if (!DECL_EXTERNAL (pv->symbol.decl))
1806       {
1807 	i = pv->symbol.order;
1808 	gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1809 	nodes[i].kind = ORDER_VAR;
1810 	nodes[i].u.v = pv;
1811       }
1812 
1813   for (pa = asm_nodes; pa; pa = pa->next)
1814     {
1815       i = pa->order;
1816       gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1817       nodes[i].kind = ORDER_ASM;
1818       nodes[i].u.a = pa;
1819     }
1820 
1821   /* In toplevel reorder mode we output all statics; mark them as needed.  */
1822 
1823   for (i = 0; i < max; ++i)
1824     if (nodes[i].kind == ORDER_VAR)
1825       varpool_finalize_named_section_flags (nodes[i].u.v);
1826 
1827   for (i = 0; i < max; ++i)
1828     {
1829       switch (nodes[i].kind)
1830 	{
1831 	case ORDER_FUNCTION:
1832 	  nodes[i].u.f->process = 0;
1833 	  expand_function (nodes[i].u.f);
1834 	  break;
1835 
1836 	case ORDER_VAR:
1837 	  varpool_assemble_decl (nodes[i].u.v);
1838 	  break;
1839 
1840 	case ORDER_ASM:
1841 	  assemble_asm (nodes[i].u.a->asm_str);
1842 	  break;
1843 
1844 	case ORDER_UNDEFINED:
1845 	  break;
1846 
1847 	default:
1848 	  gcc_unreachable ();
1849 	}
1850     }
1851 
1852   asm_nodes = NULL;
1853   free (nodes);
1854 }
1855 
/* Run the inter-procedural optimization (IPA) pass pipeline over the
   whole symbol table: early small-IPA passes, unreachable-node removal,
   summary generation, LTO stream-out, and the regular IPA passes.
   The sequencing of the calls below is significant.  */

static void
ipa_passes (void)
{
  /* IPA passes work on the whole program, not any single function;
     clear any leftover per-function context.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (all_small_ipa_passes);
      /* NOTE(review): this early return skips bitmap_obstack_release and
	 the PLUGIN_ALL_IPA_PASSES_END callback below; presumably harmless
	 because compilation stops on error -- confirm.  */
      if (seen_error ())
	return;
    }

  /* We never run removal of unreachable nodes after early passes.  This is
     because TODO is run before the subpasses.  It is important to remove
     the unreachable functions to save work at IPA level and to get LTO
     symbol tables right.  */
  symtab_remove_unreachable_nodes (true, cgraph_dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (cgraph_state < CGRAPH_STATE_IPA_SSA)
    cgraph_state = CGRAPH_STATE_IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      cgraph_process_new_functions ();

      /* Compute the per-function summaries used by the regular IPA
	 passes (and written out for LTO below).  */
      execute_ipa_summary_passes
	((struct ipa_opt_pass_d *) all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto)
    targetm.asm_out.lto_start ();

  execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);

  /* When compiling from source (not reading LTO sections back in),
     stream the IPA summaries out for a later LTO link.  */
  if (!in_lto_p)
    ipa_write_summaries ();

  if (flag_generate_lto)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes now unless we only stream LTO bytecode
     (flag_lto without fat objects) or this is an LTRANS stage.  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
1916 
1917 
1918 /* Return string alias is alias of.  */
1919 
1920 static tree
1921 get_alias_symbol (tree decl)
1922 {
1923   tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1924   return get_identifier (TREE_STRING_POINTER
1925 			  (TREE_VALUE (TREE_VALUE (alias))));
1926 }
1927 
1928 
1929 /* Weakrefs may be associated to external decls and thus not output
1930    at expansion time.  Emit all necessary aliases.  */
1931 
1932 static void
1933 output_weakrefs (void)
1934 {
1935   struct cgraph_node *node;
1936   struct varpool_node *vnode;
1937   FOR_EACH_FUNCTION (node)
1938     if (node->alias && DECL_EXTERNAL (node->symbol.decl)
1939         && !TREE_ASM_WRITTEN (node->symbol.decl)
1940 	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->symbol.decl)))
1941       do_assemble_alias (node->symbol.decl,
1942 		         node->thunk.alias ? DECL_ASSEMBLER_NAME (node->thunk.alias)
1943 		         : get_alias_symbol (node->symbol.decl));
1944   FOR_EACH_VARIABLE (vnode)
1945     if (vnode->alias && DECL_EXTERNAL (vnode->symbol.decl)
1946         && !TREE_ASM_WRITTEN (vnode->symbol.decl)
1947 	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (vnode->symbol.decl)))
1948       do_assemble_alias (vnode->symbol.decl,
1949 		         vnode->alias_of ? DECL_ASSEMBLER_NAME (vnode->alias_of)
1950 		         : get_alias_symbol (vnode->symbol.decl));
1951 }
1952 
1953 /* Initialize callgraph dump file.  */
1954 
1955 void
1956 init_cgraph (void)
1957 {
1958   if (!cgraph_dump_file)
1959     cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1960 }
1961 
1962 
/* Perform simple optimizations based on callgraph.  This is the main
   driver of the middle end: it runs the IPA passes and then expands
   and assembles every function and variable marked for output.  */

void
compile (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  cgraph_state = CGRAPH_STATE_IPA;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_lto)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* This pass removes bodies of extern inline functions we never inlined.
     Do this later so other IPA passes see what is really going on.  */
  symtab_remove_unreachable_nodes (false, dump_file);
  cgraph_global_info_ready = true;
  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "Optimized ");
      dump_symtab (cgraph_dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif

  /* Turn remaining inline clones into real function bodies before
     expansion starts.  */
  cgraph_materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (all_late_ipa_passes);
  /* Late IPA passes may have made more nodes unreachable; prune them
     before deciding what to output.  */
  symtab_remove_unreachable_nodes (true, dump_file);
#ifdef ENABLE_CHECKING
  verify_symtab ();
#endif
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  cgraph_state = CGRAPH_STATE_EXPANSION;
  /* With -fno-toplevel-reorder symbols are emitted in source order;
     otherwise asms, then functions, then variables.  */
  if (!flag_toplevel_reorder)
    output_in_order ();
  else
    {
      output_asm_statements ();

      expand_all_functions ();
      varpool_output_variables ();
    }

  cgraph_process_new_functions ();
  cgraph_state = CGRAPH_STATE_FINISHED;
  /* Weakref aliases on external decls are not emitted by expansion;
     emit them now.  */
  output_weakrefs ();

  if (cgraph_dump_file)
    {
      fprintf (cgraph_dump_file, "\nFinal ");
      dump_symtab (cgraph_dump_file);
    }
#ifdef ENABLE_CHECKING
  verify_symtab ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      struct cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->symbol.decl))
	  {
	    error_found = true;
	    dump_cgraph_node (stderr, node);
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
2076 
2077 
/* Analyze the whole compilation unit once it is parsed completely.
   Entry point for front ends after the last declaration is finalized;
   analyzes all finalized symbols and then drives compile ().  */

void
finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  cgraph_analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  This second
     call picks up alias pairs registered during the analysis above.  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  cgraph_analyze_functions ();

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
2123 
2124 
2125 #include "gt-cgraphunit.h"
2126