xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cgraphunit.c (revision e6c7e151de239c49d2e38720a061ed9d1fa99309)
1 /* Driver of optimization process
2    Copyright (C) 2003-2017 Free Software Foundation, Inc.
3    Contributed by Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 /* This module implements the main driver of the compilation process.
22 
23    The main scope of this file is to act as an interface between
24    tree-based front ends and the back end.
25 
26    The front end is supposed to use the following functionality:
27 
28     - finalize_function
29 
30       This function is called once the front end has parsed the whole body of a
31       function and it is certain that neither the body nor the declaration will change.
32 
33       (There is one exception needed for implementing GCC extern inline
34 	function.)
35 
36     - varpool_finalize_decl
37 
38       This function has the same behavior as the above but is used for static
39       variables.
40 
41     - add_asm_node
42 
43       Inserts a new toplevel ASM statement.
44 
45     - finalize_compilation_unit
46 
47       This function is called once the (source level) compilation unit is finalized
48       and it will no longer change.
49 
50       The symbol table is constructed starting from the trivially needed
51       symbols finalized by the frontend.  Functions are lowered into
52       GIMPLE representation and callgraph/reference lists are constructed.
53       Those are used to discover other necessary functions and variables.
54 
55       At the end the bodies of unreachable functions are removed.
56 
57       The function can be called multiple times when multiple source level
58       compilation units are combined.
59 
60     - compile
61 
62       This passes control to the back-end.  Optimizations are performed and
63       final assembler is generated.  This is done in the following way. Note
64       that with link time optimization the process is split into three
65       stages (compile time, linktime analysis and parallel linktime as
66       indicated below).
67 
68       Compile time:
69 
70 	1) Inter-procedural optimization.
71 	   (ipa_passes)
72 
73 	   This part is further split into:
74 
75 	   a) early optimizations. These are local passes executed in
76 	      the topological order on the callgraph.
77 
78 	      The purpose of early optimizations is to optimize away simple
79 	      things that may otherwise confuse IP analysis.  Very simple
80 	      propagation across the callgraph is done, e.g. to discover
81 	      functions without side effects, and simple inlining is performed.
82 
83 	   b) early small interprocedural passes.
84 
85 	      Those are interprocedural passes executed only at compilation
86 	      time.  These include, for example, transactional memory lowering,
87 	      unreachable code removal and other simple transformations.
88 
89 	   c) IP analysis stage.  All interprocedural passes do their
90 	      analysis.
91 
92 	      Interprocedural passes differ from small interprocedural
93 	      passes by their ability to operate across the whole program
94 	      at linktime.  Their analysis stage is performed early to
95 	      both reduce linking times and linktime memory usage by
96 	      not having to represent the whole program in memory.
97 
98 	   d) LTO streaming.  When doing LTO, everything important gets
99 	      streamed into the object file.
100 
101        Compile time and/or linktime analysis stage (WPA):
102 
103 	      At linktime units get streamed back and the symbol table is
104 	      merged.  Function bodies are not streamed in and are not
105 	      available.
106 	   e) IP propagation stage.  All IP passes execute their
107 	      IP propagation.  This is done based on the earlier analysis
108 	      without having function bodies at hand.
109 	   f) Ltrans streaming.  When doing WHOPR LTO, the program
110 	      is partitioned and streamed into multiple object files.
111 
112        Compile time and/or parallel linktime stage (ltrans)
113 
114 	      Each of the object files is streamed back and compiled
115 	      separately.  Now the function bodies become available
116 	      again.
117 
118 	 2) Virtual clone materialization
119 	    (cgraph_materialize_clone)
120 
121 	    IP passes can produce copies of existing functions (such
122 	    as versioned clones or inline clones) without actually
123 	    manipulating their bodies by creating virtual clones in
124 	    the callgraph.  At this time the virtual clones are
125 	    turned into real functions.
126 	 3) IP transformation
127 
128 	    All IP passes transform function bodies based on earlier
129 	    decisions of the IP propagation.
130 
131 	 4) late small IP passes
132 
133 	    Simple IP passes working within a single program partition.
134 
135 	 5) Expansion
136 	    (expand_all_functions)
137 
138 	    At this stage functions that need to be output into
139 	    assembler are identified and compiled in topological order.
140 	 6) Output of variables and aliases
141 	    Now it is known which variable references were not optimized
142 	    out and thus all variables are output to the file.
143 
144 	    Note that with -fno-toplevel-reorder passes 5 and 6
145 	    are combined together in cgraph_output_in_order.
146 
147    Finally there are functions to manipulate the callgraph from
148    the backend.
149     - cgraph_add_new_function is used to add backend-produced
150       functions introduced after the unit is finalized.
151       The functions are enqueued for later processing and inserted
152       into the callgraph with cgraph_process_new_functions.
153 
154     - cgraph_function_versioning
155 
156       produces a copy of a function into a new one (a version)
157       and applies simple transformations.
158 */
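
/* As a rough sketch, not drawn from any particular front end, the interface
   above is typically driven in this order; parse_next_function and
   parse_next_static_var are hypothetical front-end helpers, while the
   cgraph/varpool/symtab calls are the real entry points declared in cgraph.h
   and defined in this file:

     tree decl;
     while ((decl = parse_next_function ()) != NULL_TREE)
       cgraph_node::finalize_function (decl, false);
     while ((decl = parse_next_static_var ()) != NULL_TREE)
       varpool_node::finalize_decl (decl);
     symtab->finalize_compilation_unit ();

   finalize_compilation_unit constructs the symbol table and, in a normal
   compilation, hands control to compile () as described above.  */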
159 
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "backend.h"
164 #include "target.h"
165 #include "rtl.h"
166 #include "tree.h"
167 #include "gimple.h"
168 #include "cfghooks.h"
169 #include "regset.h"     /* FIXME: For reg_obstack.  */
170 #include "alloc-pool.h"
171 #include "tree-pass.h"
172 #include "stringpool.h"
173 #include "gimple-ssa.h"
174 #include "cgraph.h"
175 #include "coverage.h"
176 #include "lto-streamer.h"
177 #include "fold-const.h"
178 #include "varasm.h"
179 #include "stor-layout.h"
180 #include "output.h"
181 #include "cfgcleanup.h"
182 #include "gimple-fold.h"
183 #include "gimplify.h"
184 #include "gimple-iterator.h"
185 #include "gimplify-me.h"
186 #include "tree-cfg.h"
187 #include "tree-into-ssa.h"
188 #include "tree-ssa.h"
189 #include "langhooks.h"
190 #include "toplev.h"
191 #include "debug.h"
192 #include "symbol-summary.h"
193 #include "tree-vrp.h"
194 #include "ipa-prop.h"
195 #include "gimple-pretty-print.h"
196 #include "plugin.h"
197 #include "ipa-inline.h"
198 #include "ipa-utils.h"
199 #include "except.h"
200 #include "cfgloop.h"
201 #include "context.h"
202 #include "pass_manager.h"
203 #include "tree-nested.h"
204 #include "dbgcnt.h"
205 #include "tree-chkp.h"
206 #include "lto-section-names.h"
207 
208 /* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
209    secondary queue used during optimization to accommodate passes that
210    may generate new functions that need to be optimized and expanded.  */
211 vec<cgraph_node *> cgraph_new_nodes;
212 
213 static void expand_all_functions (void);
214 static void mark_functions_to_output (void);
215 static void handle_alias_pairs (void);
216 
217 /* Used for vtable lookup in thunk adjusting.  */
218 static GTY (()) tree vtable_entry_type;
219 
220 /* Return true if this symbol is a function from the C frontend specified
221    directly in RTL form (with "__RTL").  */
222 
223 bool
224 symtab_node::native_rtl_p () const
225 {
226   if (TREE_CODE (decl) != FUNCTION_DECL)
227     return false;
228   if (!DECL_STRUCT_FUNCTION (decl))
229     return false;
230   return DECL_STRUCT_FUNCTION (decl)->curr_properties & PROP_rtl;
231 }
232 
233 /* Determine if the symbol declaration is needed.  That is, visible to
234    something either outside this translation unit or to something magic
235    in the system configury.  */
236 bool
237 symtab_node::needed_p (void)
238 {
239   /* Double check that no one has output the function into the assembly
240      file early.  */
241   if (!native_rtl_p ())
242       gcc_checking_assert
243 	(!DECL_ASSEMBLER_NAME_SET_P (decl)
244 	 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
245 
246   if (!definition)
247     return false;
248 
249   if (DECL_EXTERNAL (decl))
250     return false;
251 
252   /* If the user told us it is used, then it must be so.  */
253   if (force_output)
254     return true;
255 
256   /* ABI forced symbols are needed when they are external.  */
257   if (forced_by_abi && TREE_PUBLIC (decl))
258     return true;
259 
260   /* Keep constructors, destructors and virtual functions.  */
261    if (TREE_CODE (decl) == FUNCTION_DECL
262        && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
263     return true;
264 
265   /* Externally visible variables must be output.  The exception is
266      COMDAT variables that must be output only when they are needed.  */
267   if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
268     return true;
269 
270   return false;
271 }
272 
273 /* Head and terminator of the queue of nodes to be processed while building
274    callgraph.  */
275 
276 static symtab_node symtab_terminator;
277 static symtab_node *queued_nodes = &symtab_terminator;
278 
279 /* Add NODE to the queue starting at QUEUED_NODES.  The queue is linked
280    via AUX pointers and terminated by a pointer to SYMTAB_TERMINATOR.  */
281 
282 static void
283 enqueue_node (symtab_node *node)
284 {
285   if (node->aux)
286     return;
287   gcc_checking_assert (queued_nodes);
288   node->aux = queued_nodes;
289   queued_nodes = node;
290 }
291 
292 /* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add
293    these functions into the callgraph so that they look like ordinary
294    reachable functions inserted into the callgraph at construction time.  */
295 
296 void
297 symbol_table::process_new_functions (void)
298 {
299   tree fndecl;
300 
301   if (!cgraph_new_nodes.exists ())
302     return;
303 
304   handle_alias_pairs ();
305   /*  Note that this queue may grow as it is being processed, as the new
306       functions may generate new ones.  */
307   for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
308     {
309       cgraph_node *node = cgraph_new_nodes[i];
310       fndecl = node->decl;
311       switch (state)
312 	{
313 	case CONSTRUCTION:
314 	  /* At construction time we just need to finalize the function and move
315 	     it into the reachable functions list.  */
316 
317 	  cgraph_node::finalize_function (fndecl, false);
318 	  call_cgraph_insertion_hooks (node);
319 	  enqueue_node (node);
320 	  break;
321 
322 	case IPA:
323 	case IPA_SSA:
324 	case IPA_SSA_AFTER_INLINING:
325 	  /* When IPA optimization has already started, do all the essential
326 	     transformations that have already been performed on the whole
327 	     cgraph but not on this function.  */
328 
329 	  gimple_register_cfg_hooks ();
330 	  if (!node->analyzed)
331 	    node->analyze ();
332 	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
333 	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
334 	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
335 	    g->get_passes ()->execute_early_local_passes ();
336 	  else if (inline_summaries != NULL)
337 	    compute_inline_parameters (node, true);
338 	  free_dominance_info (CDI_POST_DOMINATORS);
339 	  free_dominance_info (CDI_DOMINATORS);
340 	  pop_cfun ();
341 	  call_cgraph_insertion_hooks (node);
342 	  break;
343 
344 	case EXPANSION:
345 	  /* Functions created during expansion shall be compiled
346 	     directly.  */
347 	  node->process = 0;
348 	  call_cgraph_insertion_hooks (node);
349 	  node->expand ();
350 	  break;
351 
352 	default:
353 	  gcc_unreachable ();
354 	  break;
355 	}
356     }
357 
358   cgraph_new_nodes.release ();
359 }
360 
361 /* As a GCC extension we allow redefinition of the function.  The
362    semantics when the two bodies differ are not well defined.
363    We replace the old body with the new body, so in unit-at-a-time mode
364    we always use the new body, while in normal mode we may end up with
365    the old body inlined into some functions and the new body expanded and
366    inlined in others.
367 
368    ??? It may make more sense to use one body for inlining and the other
369    body for expanding the function, but this is difficult to do.  */
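
/* A hedged illustration (gnu89 semantics, not drawn from any particular
   testcase) of the extension described above:

     extern inline int f (void) { return 1; }    <- body offered for inlining
     int f (void) { return 2; }                   <- later redefinition

   The second definition replaces the first; cgraph_node::reset below wipes
   the node's state so the new body can be analyzed from scratch.  */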
370 
371 void
372 cgraph_node::reset (void)
373 {
374   /* If process is set, then we have already begun whole-unit analysis.
375      This is *not* testing for whether we've already emitted the function.
376      That case can be sort-of legitimately seen with real function redefinition
377      errors.  I would argue that the front end should never present us with
378      such a case, but don't enforce that for now.  */
379   gcc_assert (!process);
380 
381   /* Reset our data structures so we can analyze the function again.  */
382   memset (&local, 0, sizeof (local));
383   memset (&global, 0, sizeof (global));
384   memset (&rtl, 0, sizeof (rtl));
385   analyzed = false;
386   definition = false;
387   alias = false;
388   transparent_alias = false;
389   weakref = false;
390   cpp_implicit_alias = false;
391 
392   remove_callees ();
393   remove_all_references ();
394 }
395 
396 /* Return true when there are references to the node.  INCLUDE_SELF is
397    true if a self reference counts as a reference.  */
398 
399 bool
400 symtab_node::referred_to_p (bool include_self)
401 {
402   ipa_ref *ref = NULL;
403 
404   /* See if there are any references at all.  */
405   if (iterate_referring (0, ref))
406     return true;
407   /* For functions check also calls.  */
408   cgraph_node *cn = dyn_cast <cgraph_node *> (this);
409   if (cn && cn->callers)
410     {
411       if (include_self)
412 	return true;
413       for (cgraph_edge *e = cn->callers; e; e = e->next_caller)
414 	if (e->caller != this)
415 	  return true;
416     }
417   return false;
418 }
419 
420 /* DECL has been parsed.  Take it, queue it, compile it at the whim of the
421    logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
422    the garbage collector run at the moment.  We would need to either create
423    a new GC context, or just not compile right now.  */
424 
425 void
426 cgraph_node::finalize_function (tree decl, bool no_collect)
427 {
428   cgraph_node *node = cgraph_node::get_create (decl);
429 
430   if (node->definition)
431     {
432       /* Nested functions should only be defined once.  */
433       gcc_assert (!DECL_CONTEXT (decl)
434 		  || TREE_CODE (DECL_CONTEXT (decl)) !=	FUNCTION_DECL);
435       node->reset ();
436       node->local.redefined_extern_inline = true;
437     }
438 
439   /* Set definition first before calling notice_global_symbol so that
440      it is available to notice_global_symbol.  */
441   node->definition = true;
442   notice_global_symbol (decl);
443   node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
444 
445   /* With -fkeep-inline-functions we are keeping all inline functions except
446      for extern inline ones.  */
447   if (flag_keep_inline_functions
448       && DECL_DECLARED_INLINE_P (decl)
449       && !DECL_EXTERNAL (decl)
450       && !DECL_DISREGARD_INLINE_LIMITS (decl))
451     node->force_output = 1;
452 
453   /* __RTL functions were already output as soon as they were parsed (due
454      to the large amount of global state in the backend).
455      Mark such functions as "force_output" to reflect the fact that they
456      will be in the asm file when considering the symbols they reference.
457      The attempt to output them later on will bail out immediately.  */
458   if (node->native_rtl_p ())
459     node->force_output = 1;
460 
461   /* When not optimizing, also output the static functions (see
462      PR24561), but don't do so for always_inline functions, functions
463      declared inline and nested functions.  These were optimized out
464      in the original implementation and it is unclear whether we want
465      to change the behavior here.  */
466   if (((!opt_for_fn (decl, optimize) || flag_keep_static_functions)
467        && !node->cpp_implicit_alias
468        && !DECL_DISREGARD_INLINE_LIMITS (decl)
469        && !DECL_DECLARED_INLINE_P (decl)
470        && !(DECL_CONTEXT (decl)
471 	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
472       && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
473     node->force_output = 1;
474 
475   /* If we've not yet emitted decl, tell the debug info about it.  */
476   if (!TREE_ASM_WRITTEN (decl))
477     (*debug_hooks->deferred_inline_function) (decl);
478 
479   if (!no_collect)
480     ggc_collect ();
481 
482   if (symtab->state == CONSTRUCTION
483       && (node->needed_p () || node->referred_to_p ()))
484     enqueue_node (node);
485 }
486 
487 /* Add the function FNDECL to the call graph.
488    Unlike finalize_function, this function is intended to be used
489    by the middle end and allows insertion of a new function at an arbitrary
490    point of compilation.  The function can be in high, low or SSA form
491    GIMPLE.
492 
493    The function is assumed to be reachable and to have its address taken (so
494    no API-breaking optimizations are performed on it).
495 
496    The main work done by this function is to enqueue the function for later
497    processing, to avoid the need for the passes to be re-entrant.  */
498 
499 void
500 cgraph_node::add_new_function (tree fndecl, bool lowered)
501 {
502   gcc::pass_manager *passes = g->get_passes ();
503   cgraph_node *node;
504 
505   if (dump_file)
506     {
507       struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
508       const char *function_type = ((gimple_has_body_p (fndecl))
509 				   ? (lowered
510 				      ? (gimple_in_ssa_p (fn)
511 					 ? "ssa gimple"
512 					 : "low gimple")
513 				      : "high gimple")
514 				   : "to-be-gimplified");
515       fprintf (dump_file,
516 	       "Added new %s function %s to callgraph\n",
517 	       function_type,
518 	       fndecl_name (fndecl));
519     }
520 
521   switch (symtab->state)
522     {
523       case PARSING:
524 	cgraph_node::finalize_function (fndecl, false);
525 	break;
526       case CONSTRUCTION:
527 	/* Just enqueue the function to be processed at the nearest occasion.  */
528 	node = cgraph_node::get_create (fndecl);
529 	if (lowered)
530 	  node->lowered = true;
531 	cgraph_new_nodes.safe_push (node);
532         break;
533 
534       case IPA:
535       case IPA_SSA:
536       case IPA_SSA_AFTER_INLINING:
537       case EXPANSION:
538 	/* Bring the function into finalized state and enqueue it for later
539 	   analysis and compilation.  */
540 	node = cgraph_node::get_create (fndecl);
541 	node->local.local = false;
542 	node->definition = true;
543 	node->force_output = true;
544 	if (TREE_PUBLIC (fndecl))
545 	  node->externally_visible = true;
546 	if (!lowered && symtab->state == EXPANSION)
547 	  {
548 	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
549 	    gimple_register_cfg_hooks ();
550 	    bitmap_obstack_initialize (NULL);
551 	    execute_pass_list (cfun, passes->all_lowering_passes);
552 	    passes->execute_early_local_passes ();
553 	    bitmap_obstack_release (NULL);
554 	    pop_cfun ();
555 
556 	    lowered = true;
557 	  }
558 	if (lowered)
559 	  node->lowered = true;
560 	cgraph_new_nodes.safe_push (node);
561         break;
562 
563       case FINISHED:
564 	/* At the very end of compilation we have to do all the work up
565 	   to expansion.  */
566 	node = cgraph_node::create (fndecl);
567 	if (lowered)
568 	  node->lowered = true;
569 	node->definition = true;
570 	node->analyze ();
571 	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
572 	gimple_register_cfg_hooks ();
573 	bitmap_obstack_initialize (NULL);
574 	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
575 	  g->get_passes ()->execute_early_local_passes ();
576 	bitmap_obstack_release (NULL);
577 	pop_cfun ();
578 	node->expand ();
579 	break;
580 
581       default:
582 	gcc_unreachable ();
583     }
584 
585   /* Set a personality if required and we already passed EH lowering.  */
586   if (lowered
587       && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
588 	  == eh_personality_lang))
589     DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
590 }
591 
592 /* Analyze the function scheduled to be output.  */
593 void
594 cgraph_node::analyze (void)
595 {
596   if (native_rtl_p ())
597     {
598       analyzed = true;
599       return;
600     }
601 
602   tree decl = this->decl;
603   location_t saved_loc = input_location;
604   input_location = DECL_SOURCE_LOCATION (decl);
605 
606   if (thunk.thunk_p)
607     {
608       cgraph_node *t = cgraph_node::get (thunk.alias);
609 
610       create_edge (t, NULL, 0, CGRAPH_FREQ_BASE);
611       callees->can_throw_external = !TREE_NOTHROW (t->decl);
612       /* Target code in expand_thunk may need the thunk's target
613 	 to be analyzed, so recurse here.  */
614       if (!t->analyzed)
615 	t->analyze ();
616       if (t->alias)
617 	{
618 	  t = t->get_alias_target ();
619 	  if (!t->analyzed)
620 	    t->analyze ();
621 	}
622       if (!expand_thunk (false, false))
623 	{
624 	  thunk.alias = NULL;
625 	  return;
626 	}
627       thunk.alias = NULL;
628     }
629   if (alias)
630     resolve_alias (cgraph_node::get (alias_target), transparent_alias);
631   else if (dispatcher_function)
632     {
633       /* Generate the dispatcher body of multi-versioned functions.  */
634       cgraph_function_version_info *dispatcher_version_info
635 	= function_version ();
636       if (dispatcher_version_info != NULL
637           && (dispatcher_version_info->dispatcher_resolver
638 	      == NULL_TREE))
639 	{
640 	  tree resolver = NULL_TREE;
641 	  gcc_assert (targetm.generate_version_dispatcher_body);
642 	  resolver = targetm.generate_version_dispatcher_body (this);
643 	  gcc_assert (resolver != NULL_TREE);
644 	}
645     }
646   else
647     {
648       push_cfun (DECL_STRUCT_FUNCTION (decl));
649 
650       assign_assembler_name_if_needed (decl);
651 
652       /* Make sure to gimplify bodies only once.  During analyzing a
653 	 function we lower it, which will require gimplified nested
654 	 functions, so we can end up here with an already gimplified
655 	 body.  */
656       if (!gimple_has_body_p (decl))
657 	gimplify_function_tree (decl);
658 
659       /* Lower the function.  */
660       if (!lowered)
661 	{
662 	  if (nested)
663 	    lower_nested_functions (decl);
664 	  gcc_assert (!nested);
665 
666 	  gimple_register_cfg_hooks ();
667 	  bitmap_obstack_initialize (NULL);
668 	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
669 	  free_dominance_info (CDI_POST_DOMINATORS);
670 	  free_dominance_info (CDI_DOMINATORS);
671 	  compact_blocks ();
672 	  bitmap_obstack_release (NULL);
673 	  lowered = true;
674 	}
675 
676       pop_cfun ();
677     }
678   analyzed = true;
679 
680   input_location = saved_loc;
681 }
682 
683 /* The C++ frontend produces same-body aliases all over the place, even before
684    PCH gets streamed out.  It relies on us linking the aliases with their
685    functions in order to do the fixups, but ipa-ref is not PCH safe.
686    Consequently we first produce aliases without links, but once the C++ FE is
687    sure it won't stream PCH we build the links via this function.  */
688 
689 void
690 symbol_table::process_same_body_aliases (void)
691 {
692   symtab_node *node;
693   FOR_EACH_SYMBOL (node)
694     if (node->cpp_implicit_alias && !node->analyzed)
695       node->resolve_alias
696 	(VAR_P (node->alias_target)
697 	 ? (symtab_node *)varpool_node::get_create (node->alias_target)
698 	 : (symtab_node *)cgraph_node::get_create (node->alias_target));
699   cpp_implicit_aliases_done = true;
700 }
701 
702 /* Process attributes common for vars and functions.  */
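
/* For reference, a hedged example of the weakref usage checked below (the
   symbol names are invented):

     static void do_cleanup (void) __attribute__ ((weakref ("real_cleanup")));

   A bare "weakref" with no argument must be accompanied by an "alias"
   attribute naming the target; otherwise the warning below fires and the
   attribute is dropped.  */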
703 
704 static void
705 process_common_attributes (symtab_node *node, tree decl)
706 {
707   tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
708 
709   if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
710     {
711       warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
712 		  "%<weakref%> attribute should be accompanied with"
713 		  " an %<alias%> attribute");
714       DECL_WEAK (decl) = 0;
715       DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
716 						 DECL_ATTRIBUTES (decl));
717     }
718 
719   if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
720     node->no_reorder = 1;
721 }
722 
723 /* Look for externally_visible and used attributes and mark cgraph nodes
724    accordingly.
725 
726    We cannot mark the nodes at the point the attributes are processed (in
727    handle_*_attribute) because the copy of the declarations available at that
728    point may not be canonical.  For example, in:
729 
730     void f();
731     void f() __attribute__((used));
732 
733    the declaration we see in handle_used_attribute will be the second
734    declaration -- but the front end will subsequently merge that declaration
735    with the original declaration and discard the second declaration.
736 
737    Furthermore, we can't mark these nodes in finalize_function because:
738 
739     void f() {}
740     void f() __attribute__((externally_visible));
741 
742    is valid.
743 
744    So, we walk the nodes at the end of the translation unit, applying the
745    attributes at that point.  */
746 
747 static void
748 process_function_and_variable_attributes (cgraph_node *first,
749                                           varpool_node *first_var)
750 {
751   cgraph_node *node;
752   varpool_node *vnode;
753 
754   for (node = symtab->first_function (); node != first;
755        node = symtab->next_function (node))
756     {
757       tree decl = node->decl;
758       if (DECL_PRESERVE_P (decl))
759 	node->mark_force_output ();
760       else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
761 	{
762 	  if (! TREE_PUBLIC (node->decl))
763 	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
764 			"%<externally_visible%>"
765 			" attribute has effect only on public objects");
766 	}
767       if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
768 	  && (node->definition && !node->alias))
769 	{
770 	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
771 		      "%<weakref%> attribute ignored"
772 		      " because function is defined");
773 	  DECL_WEAK (decl) = 0;
774 	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
775 						     DECL_ATTRIBUTES (decl));
776 	}
777 
778       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
779 	  && !DECL_DECLARED_INLINE_P (decl)
780 	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
781 	  && !DECL_UNINLINABLE (decl))
782 	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
783 		    "always_inline function might not be inlinable");
784 
785       process_common_attributes (node, decl);
786     }
787   for (vnode = symtab->first_variable (); vnode != first_var;
788        vnode = symtab->next_variable (vnode))
789     {
790       tree decl = vnode->decl;
791       if (DECL_EXTERNAL (decl)
792 	  && DECL_INITIAL (decl))
793 	varpool_node::finalize_decl (decl);
794       if (DECL_PRESERVE_P (decl))
795 	vnode->force_output = true;
796       else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
797 	{
798 	  if (! TREE_PUBLIC (vnode->decl))
799 	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
800 			"%<externally_visible%>"
801 			" attribute has effect only on public objects");
802 	}
803       if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
804 	  && vnode->definition
805 	  && DECL_INITIAL (decl))
806 	{
807 	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
808 		      "%<weakref%> attribute ignored"
809 		      " because variable is initialized");
810 	  DECL_WEAK (decl) = 0;
811 	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
812 						      DECL_ATTRIBUTES (decl));
813 	}
814       process_common_attributes (vnode, decl);
815     }
816 }
817 
818 /* Mark DECL as finalized.  By finalizing the declaration, the frontend
819    instructs the middle end to output the variable to the asm file if it is
820    needed or externally visible.  */
821 
822 void
823 varpool_node::finalize_decl (tree decl)
824 {
825   varpool_node *node = varpool_node::get_create (decl);
826 
827   gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
828 
829   if (node->definition)
830     return;
831   /* Set definition first before calling notice_global_symbol so that
832      it is available to notice_global_symbol.  */
833   node->definition = true;
834   notice_global_symbol (decl);
835   if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
836       /* Traditionally we do not eliminate static variables when not
837 	 optimizing and when not doing toplevel reorder.  */
838       || node->no_reorder
839       || ((!flag_toplevel_reorder
840           && !DECL_COMDAT (node->decl)
841 	   && !DECL_ARTIFICIAL (node->decl))))
842     node->force_output = true;
843 
844   if (symtab->state == CONSTRUCTION
845       && (node->needed_p () || node->referred_to_p ()))
846     enqueue_node (node);
847   if (symtab->state >= IPA_SSA)
848     node->analyze ();
849   /* Some frontends produce various interface variables after compilation
850      has finished.  */
851   if (symtab->state == FINISHED
852       || (!flag_toplevel_reorder
853 	&& symtab->state == EXPANSION))
854     node->assemble_decl ();
855 
856   if (DECL_INITIAL (decl))
857     chkp_register_var_initializer (decl);
858 }
859 
860 /* EDGE is a polymorphic call.  Mark all possible targets as reachable
861    and, if there is only one target, perform trivial devirtualization.
862    REACHABLE_CALL_TARGETS collects target lists we have already walked to
863    avoid duplicate work.  */
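
/* A hedged C++ illustration of the trivial case handled here (the type and
   function names are invented):

     struct S final { virtual int f () { return 1; } };
     int call (S *p) { return p->f (); }

   Because S is final, S::f is the only possible target of the indirect call,
   so the edge can be turned into a direct one.  */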
864 
865 static void
866 walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
867 			       cgraph_edge *edge)
868 {
869   unsigned int i;
870   void *cache_token;
871   bool final;
872   vec <cgraph_node *>targets
873     = possible_polymorphic_call_targets
874 	(edge, &final, &cache_token);
875 
876   if (!reachable_call_targets->add (cache_token))
877     {
878       if (symtab->dump_file)
879 	dump_possible_polymorphic_call_targets
880 	  (symtab->dump_file, edge);
881 
882       for (i = 0; i < targets.length (); i++)
883 	{
884 	  /* Do not bother to mark virtual methods in an anonymous namespace;
885 	     either we will find a use of the virtual table defining it, or it
886 	     is unused.  */
887 	  if (targets[i]->definition
888 	      && TREE_CODE
889 		  (TREE_TYPE (targets[i]->decl))
890 		   == METHOD_TYPE
891 	      && !type_in_anonymous_namespace_p
892 		   (TYPE_METHOD_BASETYPE (TREE_TYPE (targets[i]->decl))))
893 	    enqueue_node (targets[i]);
894 	}
895     }
896 
897   /* Very trivial devirtualization; when the type is
898      final or anonymous (so we know all of its derivations)
899      and there is only one possible virtual call target,
900      make the edge direct.  */
901   if (final)
902     {
903       if (targets.length () <= 1 && dbg_cnt (devirt))
904 	{
905 	  cgraph_node *target;
906 	  if (targets.length () == 1)
907 	    target = targets[0];
908 	  else
909 	    target = cgraph_node::create
910 			(builtin_decl_implicit (BUILT_IN_UNREACHABLE));
911 
912 	  if (symtab->dump_file)
913 	    {
914 	      fprintf (symtab->dump_file,
915 		       "Devirtualizing call: ");
916 	      print_gimple_stmt (symtab->dump_file,
917 				 edge->call_stmt, 0,
918 				 TDF_SLIM);
919 	    }
920           if (dump_enabled_p ())
921             {
922 	      location_t locus = gimple_location_safe (edge->call_stmt);
923 	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
924 			       "devirtualizing call in %s to %s\n",
925 			       edge->caller->name (), target->name ());
926 	    }
927 
928 	  edge->make_direct (target);
929 	  edge->redirect_call_stmt_to_callee ();
930 
931 	  /* Call to __builtin_unreachable shouldn't be instrumented.  */
932 	  if (!targets.length ())
933 	    gimple_call_set_with_bounds (edge->call_stmt, false);
934 
935 	  if (symtab->dump_file)
936 	    {
937 	      fprintf (symtab->dump_file,
938 		       "Devirtualized as: ");
939 	      print_gimple_stmt (symtab->dump_file,
940 				 edge->call_stmt, 0,
941 				 TDF_SLIM);
942 	    }
943 	}
944     }
945 }
946 
947 /* Issue appropriate warnings for the global declaration DECL.  */
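
/* Hedged examples of what is diagnosed below (names invented):

     static int helper (void);   <- declared static but never defined
     static int counter;         <- defined but never used

   The first case gets -Wunused-function (or a pedwarn if it is actually
   referenced); the second gets -Wunused-variable or
   -Wunused-const-variable.  */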
948 
949 static void
950 check_global_declaration (symtab_node *snode)
951 {
952   const char *decl_file;
953   tree decl = snode->decl;
954 
955   /* Warn about any function declared static but not defined.  We don't
956      warn about variables, because many programs have static variables
957      that exist only to get some text into the object file.  */
958   if (TREE_CODE (decl) == FUNCTION_DECL
959       && DECL_INITIAL (decl) == 0
960       && DECL_EXTERNAL (decl)
961       && ! DECL_ARTIFICIAL (decl)
962       && ! TREE_NO_WARNING (decl)
963       && ! TREE_PUBLIC (decl)
964       && (warn_unused_function
965 	  || snode->referred_to_p (/*include_self=*/false)))
966     {
967       if (snode->referred_to_p (/*include_self=*/false))
968 	pedwarn (input_location, 0, "%q+F used but never defined", decl);
969       else
970 	warning (OPT_Wunused_function, "%q+F declared %<static%> but never defined", decl);
971       /* This symbol is effectively an "extern" declaration now.  */
972       TREE_PUBLIC (decl) = 1;
973     }
974 
975   /* Warn about static fns or vars defined but not used.  */
976   if (((warn_unused_function && TREE_CODE (decl) == FUNCTION_DECL)
977        || (((warn_unused_variable && ! TREE_READONLY (decl))
978 	    || (warn_unused_const_variable > 0 && TREE_READONLY (decl)
979 		&& (warn_unused_const_variable == 2
980 		    || (main_input_filename != NULL
981 			&& (decl_file = DECL_SOURCE_FILE (decl)) != NULL
982 			&& filename_cmp (main_input_filename,
983 					 decl_file) == 0))))
984 	   && VAR_P (decl)))
985       && ! DECL_IN_SYSTEM_HEADER (decl)
986       && ! snode->referred_to_p (/*include_self=*/false)
987       /* This TREE_USED check is needed in addition to referred_to_p
988 	 above, because the `__unused__' attribute is not being
989 	 considered for referred_to_p.  */
990       && ! TREE_USED (decl)
991       /* The TREE_USED bit for file-scope decls is kept in the identifier,
992 	 to handle multiple external decls in different scopes.  */
993       && ! (DECL_NAME (decl) && TREE_USED (DECL_NAME (decl)))
994       && ! DECL_EXTERNAL (decl)
995       && ! DECL_ARTIFICIAL (decl)
996       && ! DECL_ABSTRACT_ORIGIN (decl)
997       && ! TREE_PUBLIC (decl)
998       /* A volatile variable might be used in some non-obvious way.  */
999       && (! VAR_P (decl) || ! TREE_THIS_VOLATILE (decl))
1000       /* Global register variables must be declared to reserve them.  */
1001       && ! (VAR_P (decl) && DECL_REGISTER (decl))
1002       /* Global ctors and dtors are called by the runtime.  */
1003       && (TREE_CODE (decl) != FUNCTION_DECL
1004 	  || (!DECL_STATIC_CONSTRUCTOR (decl)
1005 	      && !DECL_STATIC_DESTRUCTOR (decl)))
1006       /* Otherwise, ask the language.  */
1007       && lang_hooks.decls.warn_unused_global (decl))
1008     warning_at (DECL_SOURCE_LOCATION (decl),
1009 		(TREE_CODE (decl) == FUNCTION_DECL)
1010 		? OPT_Wunused_function
1011 		: (TREE_READONLY (decl)
1012 		   ? OPT_Wunused_const_variable_
1013 		   : OPT_Wunused_variable),
1014 		"%qD defined but not used", decl);
1015 }
1016 
1017 /* Discover all functions and variables that are trivially needed, and analyze
1018    them as well as all functions and variables referred to by them.  */
1019 static cgraph_node *first_analyzed;
1020 static varpool_node *first_analyzed_var;
1021 
1022 /* FIRST_TIME is set to TRUE the first time we are called for a
1023    translation unit from finalize_compilation_unit () and false
1024    otherwise.  */
1025 
1026 static void
1027 analyze_functions (bool first_time)
1028 {
1029   /* Keep track of already processed nodes when called multiple times for
1030      intermodule optimization.  */
1031   cgraph_node *first_handled = first_analyzed;
1032   varpool_node *first_handled_var = first_analyzed_var;
1033   hash_set<void *> reachable_call_targets;
1034 
1035   symtab_node *node;
1036   symtab_node *next;
1037   int i;
1038   ipa_ref *ref;
1039   bool changed = true;
1040   location_t saved_loc = input_location;
1041 
1042   bitmap_obstack_initialize (NULL);
1043   symtab->state = CONSTRUCTION;
1044   input_location = UNKNOWN_LOCATION;
1045 
1046   /* Ugly, but the fixup cannot happen at the time the same-body alias is
1047      created; the C++ FE is confused about the COMDAT groups being right.  */
1048   if (symtab->cpp_implicit_aliases_done)
1049     FOR_EACH_SYMBOL (node)
1050       if (node->cpp_implicit_alias)
1051 	  node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
1052   build_type_inheritance_graph ();
1053 
1054   /* Analysis adds static variables that in turn add references to new
1055      functions, so we need to iterate the process until it stabilizes.  */
1056   while (changed)
1057     {
1058       changed = false;
1059       process_function_and_variable_attributes (first_analyzed,
1060 						first_analyzed_var);
1061 
1062       /* First identify the trivially needed symbols.  */
1063       for (node = symtab->first_symbol ();
1064 	   node != first_analyzed
1065 	   && node != first_analyzed_var; node = node->next)
1066 	{
1067 	  /* Convert COMDAT group designators to IDENTIFIER_NODEs.  */
1068 	  node->get_comdat_group_id ();
1069 	  if (node->needed_p ())
1070 	    {
1071 	      enqueue_node (node);
1072 	      if (!changed && symtab->dump_file)
1073 		fprintf (symtab->dump_file, "Trivially needed symbols:");
1074 	      changed = true;
1075 	      if (symtab->dump_file)
1076 		fprintf (symtab->dump_file, " %s", node->asm_name ());
1077 	      if (!changed && symtab->dump_file)
1078 		fprintf (symtab->dump_file, "\n");
1079 	    }
1080 	  if (node == first_analyzed
1081 	      || node == first_analyzed_var)
1082 	    break;
1083 	}
1084       symtab->process_new_functions ();
1085       first_analyzed_var = symtab->first_variable ();
1086       first_analyzed = symtab->first_function ();
1087 
1088       if (changed && symtab->dump_file)
1089 	fprintf (symtab->dump_file, "\n");
1090 
1091       /* Lower the representation, build callgraph edges and references for all
1092          trivially needed symbols and all symbols referred to by them.  */
1093       while (queued_nodes != &symtab_terminator)
1094 	{
1095 	  changed = true;
1096 	  node = queued_nodes;
1097 	  queued_nodes = (symtab_node *)queued_nodes->aux;
1098 	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1099 	  if (cnode && cnode->definition)
1100 	    {
1101 	      cgraph_edge *edge;
1102 	      tree decl = cnode->decl;
1103 
1104 	      /* ??? It is possible to create an extern inline function
1105 	      and later use the weak alias attribute to kill its body.
1106 	      See gcc.c-torture/compile/20011119-1.c  */
1107 	      if (!DECL_STRUCT_FUNCTION (decl)
1108 		  && !cnode->alias
1109 		  && !cnode->thunk.thunk_p
1110 		  && !cnode->dispatcher_function)
1111 		{
1112 		  cnode->reset ();
1113 		  cnode->local.redefined_extern_inline = true;
1114 		  continue;
1115 		}
1116 
1117 	      if (!cnode->analyzed)
1118 		cnode->analyze ();
1119 
1120 	      for (edge = cnode->callees; edge; edge = edge->next_callee)
1121 		if (edge->callee->definition
1122 		    && (!DECL_EXTERNAL (edge->callee->decl)
1123 			/* When not optimizing, do not try to analyze extern
1124 			   inline functions.  Doing so is pointless.  */
1125 			|| opt_for_fn (edge->callee->decl, optimize)
1126 			/* Weakrefs need to be preserved.  */
1127 			|| edge->callee->alias
1128 			/* always_inline functions are inlined even at -O0.  */
1129 		        || lookup_attribute
1130 				 ("always_inline",
1131 			          DECL_ATTRIBUTES (edge->callee->decl))
1132 			/* Multiversioned functions need the dispatcher to
1133 			   be produced locally even for extern functions.  */
1134 			|| edge->callee->function_version ()))
1135 		   enqueue_node (edge->callee);
1136 	      if (opt_for_fn (cnode->decl, optimize)
1137 		  && opt_for_fn (cnode->decl, flag_devirtualize))
1138 		{
1139 		  cgraph_edge *next;
1140 
1141 		  for (edge = cnode->indirect_calls; edge; edge = next)
1142 		    {
1143 		      next = edge->next_callee;
1144 		      if (edge->indirect_info->polymorphic)
1145 			walk_polymorphic_call_targets (&reachable_call_targets,
1146 						       edge);
1147 		    }
1148 		}
1149 
1150 	      /* If decl is a clone of an abstract function,
1151 		 mark that abstract function so that we don't release its body.
1152 		 The DECL_INITIAL() of that abstract function declaration
1153 		 will later be needed to output debug info.  */
1154 	      if (DECL_ABSTRACT_ORIGIN (decl))
1155 		{
1156 		  cgraph_node *origin_node
1157 		    = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
1158 		  origin_node->used_as_abstract_origin = true;
1159 		}
1160 	      /* Preserve a function's function context node.  It will
1161 		 later be needed to output debug info.  */
1162 	      if (tree fn = decl_function_context (decl))
1163 		{
1164 		  cgraph_node *origin_node = cgraph_node::get_create (fn);
1165 		  enqueue_node (origin_node);
1166 		}
1167 	    }
1168 	  else
1169 	    {
1170 	      varpool_node *vnode = dyn_cast <varpool_node *> (node);
1171 	      if (vnode && vnode->definition && !vnode->analyzed)
1172 		vnode->analyze ();
1173 	    }
1174 
1175 	  if (node->same_comdat_group)
1176 	    {
1177 	      symtab_node *next;
1178 	      for (next = node->same_comdat_group;
1179 		   next != node;
1180 		   next = next->same_comdat_group)
1181 		if (!next->comdat_local_p ())
1182 		  enqueue_node (next);
1183 	    }
1184 	  for (i = 0; node->iterate_reference (i, ref); i++)
1185 	    if (ref->referred->definition
1186 		&& (!DECL_EXTERNAL (ref->referred->decl)
1187 		    || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
1188 			 && optimize)
1189 			|| (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
1190 			    && opt_for_fn (ref->referred->decl, optimize))
1191 		    || node->alias
1192 		    || ref->referred->alias)))
1193 	      enqueue_node (ref->referred);
1194 	  symtab->process_new_functions ();
1195 	}
1196     }
1197   update_type_inheritance_graph ();
1198 
1199   /* Collect entry points to the unit.  */
1200   if (symtab->dump_file)
1201     {
1202       fprintf (symtab->dump_file, "\n\nInitial ");
1203       symtab_node::dump_table (symtab->dump_file);
1204     }
1205 
1206   if (first_time)
1207     {
1208       symtab_node *snode;
1209       FOR_EACH_SYMBOL (snode)
1210 	check_global_declaration (snode);
1211     }
1212 
1213   if (symtab->dump_file)
1214     fprintf (symtab->dump_file, "\nRemoving unused symbols:");
1215 
1216   for (node = symtab->first_symbol ();
1217        node != first_handled
1218        && node != first_handled_var; node = next)
1219     {
1220       next = node->next;
1221       if (!node->aux && !node->referred_to_p ())
1222 	{
1223 	  if (symtab->dump_file)
1224 	    fprintf (symtab->dump_file, " %s", node->name ());
1225 
1226 	  /* See if the debugger can use anything before the DECL
1227 	     passes away.  Perhaps it can notice a DECL that is now a
1228 	     constant and can tag the early DIE with an appropriate
1229 	     attribute.
1230 
1231 	     Otherwise, this is the last chance the debug_hooks have
1232 	     at looking at optimized away DECLs, since
1233 	     late_global_decl will subsequently be called from the
1234 	     contents of the now pruned symbol table.  */
1235 	  if (VAR_P (node->decl)
1236 	      && !decl_function_context (node->decl))
1237 	    {
1238 	      /* We are reclaiming totally unreachable code and variables
1239 	         so they effectively appear as readonly.  Show that to
1240 		 the debug machinery.  */
1241 	      TREE_READONLY (node->decl) = 1;
1242 	      node->definition = false;
1243 	      (*debug_hooks->late_global_decl) (node->decl);
1244 	    }
1245 
1246 	  node->remove ();
1247 	  continue;
1248 	}
1249       if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1250 	{
1251 	  tree decl = node->decl;
1252 
1253 	  if (cnode->definition && !gimple_has_body_p (decl)
1254 	      && !cnode->alias
1255 	      && !cnode->thunk.thunk_p)
1256 	    cnode->reset ();
1257 
1258 	  gcc_assert (!cnode->definition || cnode->thunk.thunk_p
1259 		      || cnode->alias
1260 		      || gimple_has_body_p (decl)
1261 		      || cnode->native_rtl_p ());
1262 	  gcc_assert (cnode->analyzed == cnode->definition);
1263 	}
1264       node->aux = NULL;
1265     }
1266   for (;node; node = node->next)
1267     node->aux = NULL;
1268   first_analyzed = symtab->first_function ();
1269   first_analyzed_var = symtab->first_variable ();
1270   if (symtab->dump_file)
1271     {
1272       fprintf (symtab->dump_file, "\n\nReclaimed ");
1273       symtab_node::dump_table (symtab->dump_file);
1274     }
1275   bitmap_obstack_release (NULL);
1276   ggc_collect ();
1277   /* Initialize the assembler name hash; in particular we want to trigger C++
1278      mangling and same-body alias creation before we free the DECL_ARGUMENTS
1279      used by it.  */
1280   if (!seen_error ())
1281     symtab->symtab_initialize_asm_name_hash ();
1282 
1283   input_location = saved_loc;
1284 }
1285 
1286 /* Translate the ugly representation of aliases as alias pairs into a nice
1287    representation in the callgraph.  We don't handle all cases yet,
1288    unfortunately.  */
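
/* A hedged reminder of where alias pairs come from (declarations invented
   for illustration):

     void impl (void) { }
     void api (void) __attribute__ ((alias ("impl")));

   The front end records the pair (api, "impl"); the function below resolves
   it into a cgraph or varpool alias, or diagnoses it.  */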
1289 
1290 static void
1291 handle_alias_pairs (void)
1292 {
1293   alias_pair *p;
1294   unsigned i;
1295 
1296   for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
1297     {
1298       symtab_node *target_node = symtab_node::get_for_asmname (p->target);
1299 
1300       /* Weakrefs with a target not defined in the current unit are easy to handle:
1301 	 they behave just like external variables except that we need to note the
1302 	 alias flag to later output the weakref pseudo-op into the asm file.  */
1303       if (!target_node
1304 	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
1305 	{
1306 	  symtab_node *node = symtab_node::get (p->decl);
1307 	  if (node)
1308 	    {
1309 	      node->alias_target = p->target;
1310 	      node->weakref = true;
1311 	      node->alias = true;
1312 	      node->transparent_alias = true;
1313 	    }
1314 	  alias_pairs->unordered_remove (i);
1315 	  continue;
1316 	}
1317       else if (!target_node)
1318 	{
1319 	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
1320 	  symtab_node *node = symtab_node::get (p->decl);
1321 	  if (node)
1322 	    node->alias = false;
1323 	  alias_pairs->unordered_remove (i);
1324 	  continue;
1325 	}
1326 
1327       if (DECL_EXTERNAL (target_node->decl)
1328 	  /* We use local aliases for C++ thunks to force the tailcall
1329 	     to bind locally.  This is a hack - to keep it working do
1330 	     the following (which is not strictly correct).  */
1331 	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
1332 	      || ! DECL_VIRTUAL_P (target_node->decl))
1333 	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
1334 	{
1335 	  error ("%q+D aliased to external symbol %qE",
1336 		 p->decl, p->target);
1337 	}
1338 
1339       if (TREE_CODE (p->decl) == FUNCTION_DECL
1340           && target_node && is_a <cgraph_node *> (target_node))
1341 	{
1342 	  cgraph_node *src_node = cgraph_node::get (p->decl);
1343 	  if (src_node && src_node->definition)
1344 	    src_node->reset ();
1345 	  cgraph_node::create_alias (p->decl, target_node->decl);
1346 	  alias_pairs->unordered_remove (i);
1347 	}
1348       else if (VAR_P (p->decl)
1349 	       && target_node && is_a <varpool_node *> (target_node))
1350 	{
1351 	  varpool_node::create_alias (p->decl, target_node->decl);
1352 	  alias_pairs->unordered_remove (i);
1353 	}
1354       else
1355 	{
1356 	  error ("%q+D alias between function and variable is not supported",
1357 		 p->decl);
1358 	  warning (0, "%q+D aliased declaration",
1359 		   target_node->decl);
1360 	  alias_pairs->unordered_remove (i);
1361 	}
1362     }
1363   vec_free (alias_pairs);
1364 }
1365 
1366 
1367 /* Figure out what functions we want to assemble.  */
1368 
1369 static void
1370 mark_functions_to_output (void)
1371 {
1372   bool check_same_comdat_groups = false;
1373   cgraph_node *node;
1374 
1375   if (flag_checking)
1376     FOR_EACH_FUNCTION (node)
1377       gcc_assert (!node->process);
1378 
1379   FOR_EACH_FUNCTION (node)
1380     {
1381       tree decl = node->decl;
1382 
1383       gcc_assert (!node->process || node->same_comdat_group);
1384       if (node->process)
1385 	continue;
1386 
1387       /* We need to output all local functions that are used and not
1388 	 always inlined, as well as those that are reachable from
1389 	 outside the current compilation unit.  */
1390       if (node->analyzed
1391 	  && !node->thunk.thunk_p
1392 	  && !node->alias
1393 	  && !node->global.inlined_to
1394 	  && !TREE_ASM_WRITTEN (decl)
1395 	  && !DECL_EXTERNAL (decl))
1396 	{
1397 	  node->process = 1;
1398 	  if (node->same_comdat_group)
1399 	    {
1400 	      cgraph_node *next;
1401 	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
1402 		   next != node;
1403 		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
1404 		if (!next->thunk.thunk_p && !next->alias
1405 		    && !next->comdat_local_p ())
1406 		  next->process = 1;
1407 	    }
1408 	}
1409       else if (node->same_comdat_group)
1410 	{
1411 	  if (flag_checking)
1412 	    check_same_comdat_groups = true;
1413 	}
1414       else
1415 	{
1416 	  /* We should've reclaimed all functions that are not needed.  */
1417 	  if (flag_checking
1418 	      && !node->global.inlined_to
1419 	      && gimple_has_body_p (decl)
1420 	      /* FIXME: in an ltrans unit when the offline copy is outside a partition
1421 		 but inline copies are inside a partition, we can end up not removing
1422 		 the body since we no longer have an analyzed node pointing to it.  */
1423 	      && !node->in_other_partition
1424 	      && !node->alias
1425 	      && !node->clones
1426 	      && !DECL_EXTERNAL (decl))
1427 	    {
1428 	      node->debug ();
1429 	      internal_error ("failed to reclaim unneeded function");
1430 	    }
1431 	  gcc_assert (node->global.inlined_to
1432 		      || !gimple_has_body_p (decl)
1433 		      || node->in_other_partition
1434 		      || node->clones
1435 		      || DECL_ARTIFICIAL (decl)
1436 		      || DECL_EXTERNAL (decl));
1437 
1438 	}
1439 
1440     }
1441   if (flag_checking && check_same_comdat_groups)
1442     FOR_EACH_FUNCTION (node)
1443       if (node->same_comdat_group && !node->process)
1444 	{
1445 	  tree decl = node->decl;
1446 	  if (!node->global.inlined_to
1447 	      && gimple_has_body_p (decl)
1448 	      /* FIXME: in an ltrans unit when the offline copy is outside a
1449 		 partition but inline copies are inside a partition, we can
1450 		 end up not removing the body since we no longer have an
1451 		 analyzed node pointing to it.  */
1452 	      && !node->in_other_partition
1453 	      && !node->clones
1454 	      && !DECL_EXTERNAL (decl))
1455 	    {
1456 	      node->debug ();
1457 	      internal_error ("failed to reclaim unneeded function in same "
1458 			      "comdat group");
1459 	    }
1460 	}
1461 }
1462 
1463 /* DECL is a FUNCTION_DECL.  Initialize data structures so DECL is a function
1464    in lowered GIMPLE form.  IN_SSA is true if the GIMPLE is in SSA form.
1465    COUNT is the profile count used for the new entry/exit blocks and edges.
1466    Set current_function_decl and cfun to the newly constructed empty function
1467    body.  Return the basic block in the function body.  */
1468 
1469 basic_block
1470 init_lowered_empty_function (tree decl, bool in_ssa, gcov_type count)
1471 {
1472   basic_block bb;
1473   edge e;
1474 
1475   current_function_decl = decl;
1476   allocate_struct_function (decl, false);
1477   gimple_register_cfg_hooks ();
1478   init_empty_tree_cfg ();
1479   init_tree_ssa (cfun);
1480 
1481   if (in_ssa)
1482     {
1483       init_ssa_operands (cfun);
1484       cfun->gimple_df->in_ssa_p = true;
1485       cfun->curr_properties |= PROP_ssa;
1486     }
1487 
1488   DECL_INITIAL (decl) = make_node (BLOCK);
1489   BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1490 
1491   DECL_SAVED_TREE (decl) = error_mark_node;
1492   cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
1493 			    | PROP_cfg | PROP_loops);
1494 
1495   set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
1496   init_loops_structure (cfun, loops_for_fn (cfun), 1);
1497   loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
1498 
1499   /* Create BB for body of the function and connect it properly.  */
1500   ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
1501   ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
1502   EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
1503   EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
1504   bb = create_basic_block (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun));
1505   bb->count = count;
1506   bb->frequency = BB_FREQ_MAX;
1507   e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
1508   e->count = count;
1509   e->probability = REG_BR_PROB_BASE;
1510   e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1511   e->count = count;
1512   e->probability = REG_BR_PROB_BASE;
1513   add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
1514 
1515   return bb;
1516 }
1517 
1518 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1519    offset indicated by VIRTUAL_OFFSET, if that is
1520    non-null.  THIS_ADJUSTING is nonzero for a this-adjusting thunk and
1521    zero for a result-adjusting thunk.  */
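
/* A hedged C++ sketch of where such adjustments come from (the class names
   are invented):

     struct A { virtual void f (); int a; };
     struct B { virtual void g (); int b; };
     struct C : A, B { void g (); };

   The entry for C::g in the B-in-C vtable is a this-adjusting thunk: it
   subtracts the offset of the B subobject from the incoming pointer before
   transferring control to C::g.  Covariant return types similarly require
   result-adjusting thunks.  */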
1522 
1523 tree
1524 thunk_adjust (gimple_stmt_iterator * bsi,
1525 	      tree ptr, bool this_adjusting,
1526 	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
1527 {
1528   gassign *stmt;
1529   tree ret;
1530 
1531   if (this_adjusting
1532       && fixed_offset != 0)
1533     {
1534       stmt = gimple_build_assign
1535 		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
1536 						       ptr,
1537 						       fixed_offset));
1538       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1539     }
1540 
1541   /* If there's a virtual offset, look up that value in the vtable and
1542      adjust the pointer again.  */
1543   if (virtual_offset)
1544     {
1545       tree vtabletmp;
1546       tree vtabletmp2;
1547       tree vtabletmp3;
1548 
1549       if (!vtable_entry_type)
1550 	{
1551 	  tree vfunc_type = make_node (FUNCTION_TYPE);
1552 	  TREE_TYPE (vfunc_type) = integer_type_node;
1553 	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1554 	  layout_type (vfunc_type);
1555 
1556 	  vtable_entry_type = build_pointer_type (vfunc_type);
1557 	}
1558 
1559       vtabletmp =
1560 	create_tmp_reg (build_pointer_type
1561 			  (build_pointer_type (vtable_entry_type)), "vptr");
1562 
1563       /* The vptr is always at offset zero in the object.  */
1564       stmt = gimple_build_assign (vtabletmp,
1565 				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1566 					  ptr));
1567       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1568 
1569       /* Form the vtable address.  */
1570       vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
1571 				     "vtableaddr");
1572       stmt = gimple_build_assign (vtabletmp2,
1573 				  build_simple_mem_ref (vtabletmp));
1574       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1575 
1576       /* Find the entry with the vcall offset.  */
1577       stmt = gimple_build_assign (vtabletmp2,
1578 				  fold_build_pointer_plus_loc (input_location,
1579 							       vtabletmp2,
1580 							       virtual_offset));
1581       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1582 
1583       /* Get the offset itself.  */
1584       vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1585 				     "vcalloffset");
1586       stmt = gimple_build_assign (vtabletmp3,
1587 				  build_simple_mem_ref (vtabletmp2));
1588       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1589 
1590       /* Adjust the `this' pointer.  */
1591       ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
1592       ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
1593 				      GSI_CONTINUE_LINKING);
1594     }
1595 
1596   if (!this_adjusting
1597       && fixed_offset != 0)
1598     /* Adjust the pointer by the constant.  */
1599     {
1600       tree ptrtmp;
1601 
1602       if (VAR_P (ptr))
1603         ptrtmp = ptr;
1604       else
1605         {
1606           ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
1607           stmt = gimple_build_assign (ptrtmp, ptr);
1608 	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1609 	}
1610       ptr = fold_build_pointer_plus_hwi_loc (input_location,
1611 					     ptrtmp, fixed_offset);
1612     }
1613 
1614   /* Emit the statement and gimplify the adjustment expression.  */
1615   ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
1616   stmt = gimple_build_assign (ret, ptr);
1617   gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1618 
1619   return ret;
1620 }
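
/* For illustration (the types below are purely illustrative): the
   adjustments emitted by thunk_adjust typically come from C++ multiple and
   virtual inheritance, e.g.

     struct A { virtual void f (); int a; };
     struct B { virtual void g (); int b; };
     struct C : A, B { void g (); };	// C::g overrides B::g

   Calling C::g through a B* needs a this-adjusting thunk whose FIXED_OFFSET
   is minus the offset of the B subobject inside C.  If B were a virtual
   base, that offset would only be known at run time and would be loaded
   from the vtable, which is the VIRTUAL_OFFSET path above.  */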
1621 
1622 /* Expand the thunk NODE to GIMPLE if possible.
1623    When FORCE_GIMPLE_THUNK is true, a GIMPLE thunk is created and
1624    no assembler is produced.
1625    When OUTPUT_ASM_THUNKS is true, also produce assembler for
1626    thunks that are not lowered.  */
1627 
1628 bool
1629 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1630 {
1631   bool this_adjusting = thunk.this_adjusting;
1632   HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1633   HOST_WIDE_INT virtual_value = thunk.virtual_value;
1634   tree virtual_offset = NULL;
1635   tree alias = callees->callee->decl;
1636   tree thunk_fndecl = decl;
1637   tree a;
1638 
1639   /* An instrumentation thunk is the same function with
1640      a different signature.  It never needs to be expanded.  */
1641   if (thunk.add_pointer_bounds_args)
1642     return false;
1643 
1644   if (!force_gimple_thunk && this_adjusting
1645       && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1646 					      virtual_value, alias))
1647     {
1648       const char *fnname;
1649       tree fn_block;
1650       tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1651 
1652       if (!output_asm_thunks)
1653 	{
1654 	  analyzed = true;
1655 	  return false;
1656 	}
1657 
1658       if (in_lto_p)
1659 	get_untransformed_body ();
1660       a = DECL_ARGUMENTS (thunk_fndecl);
1661 
1662       current_function_decl = thunk_fndecl;
1663 
1664       /* Ensure thunks are emitted in their correct sections.  */
1665       resolve_unique_section (thunk_fndecl, 0,
1666 			      flag_function_sections);
1667 
1668       DECL_RESULT (thunk_fndecl)
1669 	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1670 		      RESULT_DECL, 0, restype);
1671       DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1672       fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1673 
1674       /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1675 	 create one.  */
1676       fn_block = make_node (BLOCK);
1677       BLOCK_VARS (fn_block) = a;
1678       DECL_INITIAL (thunk_fndecl) = fn_block;
1679       BLOCK_SUPERCONTEXT (fn_block) = thunk_fndecl;
1680       allocate_struct_function (thunk_fndecl, false);
1681       init_function_start (thunk_fndecl);
1682       cfun->is_thunk = 1;
1683       insn_locations_init ();
1684       set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1685       prologue_location = curr_insn_location ();
1686       assemble_start_function (thunk_fndecl, fnname);
1687 
1688       targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1689 				       fixed_offset, virtual_value, alias);
1690 
1691       assemble_end_function (thunk_fndecl, fnname);
1692       insn_locations_finalize ();
1693       init_insn_lengths ();
1694       free_after_compilation (cfun);
1695       TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1696       thunk.thunk_p = false;
1697       analyzed = false;
1698     }
1699   else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1700     {
1701       error ("generic thunk code fails for method %qD which uses %<...%>",
1702 	     thunk_fndecl);
1703       TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1704       analyzed = true;
1705       return false;
1706     }
1707   else
1708     {
1709       tree restype;
1710       basic_block bb, then_bb, else_bb, return_bb;
1711       gimple_stmt_iterator bsi;
1712       int nargs = 0;
1713       tree arg;
1714       int i;
1715       tree resdecl;
1716       tree restmp = NULL;
1717       tree resbnd = NULL;
1718 
1719       gcall *call;
1720       greturn *ret;
1721       bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1722 
1723       /* We may be called from create_wrapper, which releases the body
1724 	 except for DECL_ARGUMENTS.  In this case force_gimple_thunk is true.  */
1725       if (in_lto_p && !force_gimple_thunk)
1726 	get_untransformed_body ();
1727       a = DECL_ARGUMENTS (thunk_fndecl);
1728 
1729       current_function_decl = thunk_fndecl;
1730 
1731       /* Ensure thunks are emitted in their correct sections.  */
1732       resolve_unique_section (thunk_fndecl, 0,
1733 			      flag_function_sections);
1734 
1735       DECL_IGNORED_P (thunk_fndecl) = 1;
1736       bitmap_obstack_initialize (NULL);
1737 
1738       if (thunk.virtual_offset_p)
1739         virtual_offset = size_int (virtual_value);
1740 
1741       /* Build the return declaration for the function.  */
1742       restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1743       if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1744 	{
1745 	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1746 	  DECL_ARTIFICIAL (resdecl) = 1;
1747 	  DECL_IGNORED_P (resdecl) = 1;
1748 	  DECL_RESULT (thunk_fndecl) = resdecl;
1749           DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1750 	}
1751       else
1752 	resdecl = DECL_RESULT (thunk_fndecl);
1753 
1754       bb = then_bb = else_bb = return_bb
1755 	= init_lowered_empty_function (thunk_fndecl, true, count);
1756 
1757       bsi = gsi_start_bb (bb);
1758 
1759       /* Build call to the function being thunked.  */
1760       if (!VOID_TYPE_P (restype)
1761 	  && (!alias_is_noreturn
1762 	      || TREE_ADDRESSABLE (restype)
1763 	      || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
1764 	{
1765 	  if (DECL_BY_REFERENCE (resdecl))
1766 	    {
1767 	      restmp = gimple_fold_indirect_ref (resdecl);
1768 	      if (!restmp)
1769 		restmp = build2 (MEM_REF,
1770 				 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
1771 				 resdecl,
1772 				 build_int_cst (TREE_TYPE
1773 				   (DECL_RESULT (alias)), 0));
1774 	    }
1775 	  else if (!is_gimple_reg_type (restype))
1776 	    {
1777 	      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1778 		{
1779 		  restmp = resdecl;
1780 
1781 		  if (VAR_P (restmp))
1782 		    add_local_decl (cfun, restmp);
1783 		  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1784 		}
1785 	      else
1786 		restmp = create_tmp_var (restype, "retval");
1787 	    }
1788 	  else
1789 	    restmp = create_tmp_reg (restype, "retval");
1790 	}
1791 
1792       for (arg = a; arg; arg = DECL_CHAIN (arg))
1793         nargs++;
1794       auto_vec<tree> vargs (nargs);
1795       i = 0;
1796       arg = a;
1797       if (this_adjusting)
1798 	{
1799 	  vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1800 					  virtual_offset));
1801 	  arg = DECL_CHAIN (a);
1802 	  i = 1;
1803 	}
1804 
1805       if (nargs)
1806 	for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1807 	  {
1808 	    tree tmp = arg;
1809 	    if (VECTOR_TYPE_P (TREE_TYPE (arg))
1810 		|| TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
1811 	      DECL_GIMPLE_REG_P (arg) = 1;
1812 
1813 	    if (!is_gimple_val (arg))
1814 	      {
1815 		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1816 				      (TREE_TYPE (arg)), "arg");
1817 		gimple *stmt = gimple_build_assign (tmp, arg);
1818 		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1819 	      }
1820 	    vargs.quick_push (tmp);
1821 	  }
1822       call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1823       callees->call_stmt = call;
1824       gimple_call_set_from_thunk (call, true);
1825       gimple_call_set_with_bounds (call, instrumentation_clone);
1826 
1827       /* Return slot optimization is always possible and in fact required to
1828          return values with DECL_BY_REFERENCE.  */
1829       if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
1830 	  && (!is_gimple_reg_type (TREE_TYPE (resdecl))
1831 	      || DECL_BY_REFERENCE (resdecl)))
1832         gimple_call_set_return_slot_opt (call, true);
1833 
1834       if (restmp)
1835 	{
1836           gimple_call_set_lhs (call, restmp);
1837 	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1838 						 TREE_TYPE (TREE_TYPE (alias))));
1839 	}
1840       gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1841       if (!alias_is_noreturn)
1842 	{
1843 	  if (instrumentation_clone
1844 	      && !DECL_BY_REFERENCE (resdecl)
1845 	      && restmp
1846 	      && BOUNDED_P (restmp))
1847 	    {
1848 	      resbnd = chkp_insert_retbnd_call (NULL, restmp, &bsi);
1849 	      create_edge (get_create (gimple_call_fndecl (gsi_stmt (bsi))),
1850 			   as_a <gcall *> (gsi_stmt (bsi)),
1851 			   callees->count, callees->frequency);
1852 	    }
1853 
1854 	  if (restmp && !this_adjusting
1855 	      && (fixed_offset || virtual_offset))
1856 	    {
1857 	      tree true_label = NULL_TREE;
1858 
1859 	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1860 		{
1861 		  gimple *stmt;
1862 		  edge e;
1863 		  /* If the return type is a pointer, we need to
1864 		     protect against NULL.  We know there will be an
1865 		     adjustment, because that's why we're emitting a
1866 		     thunk.  */
1867 		  then_bb = create_basic_block (NULL, bb);
1868 		  then_bb->count = count - count / 16;
1869 		  then_bb->frequency = BB_FREQ_MAX - BB_FREQ_MAX / 16;
1870 		  return_bb = create_basic_block (NULL, then_bb);
1871 		  return_bb->count = count;
1872 		  return_bb->frequency = BB_FREQ_MAX;
1873 		  else_bb = create_basic_block (NULL, else_bb);
1874 		  else_bb->count = count / 16;
1875 		  else_bb->frequency = BB_FREQ_MAX / 16;
1876 		  add_bb_to_loop (then_bb, bb->loop_father);
1877 		  add_bb_to_loop (return_bb, bb->loop_father);
1878 		  add_bb_to_loop (else_bb, bb->loop_father);
1879 		  remove_edge (single_succ_edge (bb));
1880 		  true_label = gimple_block_label (then_bb);
1881 		  stmt = gimple_build_cond (NE_EXPR, restmp,
1882 					    build_zero_cst (TREE_TYPE (restmp)),
1883 					    NULL_TREE, NULL_TREE);
1884 		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1885 		  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1886 		  e->probability = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 16;
1887 		  e->count = count - count / 16;
1888 		  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1889 		  e->probability = REG_BR_PROB_BASE / 16;
1890 		  e->count = count / 16;
1891 		  e = make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1892 		  e->probability = REG_BR_PROB_BASE;
1893 		  e->count = count;
1894 		  e = make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1895 		  e->probability = REG_BR_PROB_BASE;
1896 		  e->count = count - count / 16;
1897 		  e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1898 		  e->probability = REG_BR_PROB_BASE;
1899 		  e->count = count / 16;
1900 		  bsi = gsi_last_bb (then_bb);
1901 		}
1902 
1903 	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1904 				     fixed_offset, virtual_offset);
1905 	      if (true_label)
1906 		{
1907 		  gimple *stmt;
1908 		  bsi = gsi_last_bb (else_bb);
1909 		  stmt = gimple_build_assign (restmp,
1910 					      build_zero_cst (TREE_TYPE (restmp)));
1911 		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1912 		  bsi = gsi_last_bb (return_bb);
1913 		}
1914 	    }
1915 	  else
1916 	    gimple_call_set_tail (call, true);
1917 
1918 	  /* Build return value.  */
1919 	  if (!DECL_BY_REFERENCE (resdecl))
1920 	    ret = gimple_build_return (restmp);
1921 	  else
1922 	    ret = gimple_build_return (resdecl);
1923 	  gimple_return_set_retbnd (ret, resbnd);
1924 
1925 	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1926 	}
1927       else
1928 	{
1929 	  gimple_call_set_tail (call, true);
1930 	  remove_edge (single_succ_edge (bb));
1931 	}
1932 
1933       cfun->gimple_df->in_ssa_p = true;
1934       profile_status_for_fn (cfun)
1935         = count ? PROFILE_READ : PROFILE_GUESSED;
1936       /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
1937       TREE_ASM_WRITTEN (thunk_fndecl) = false;
1938       delete_unreachable_blocks ();
1939       update_ssa (TODO_update_ssa);
1940       checking_verify_flow_info ();
1941       free_dominance_info (CDI_DOMINATORS);
1942 
1943       /* Since we want to emit the thunk, we explicitly mark its name as
1944 	 referenced.  */
1945       thunk.thunk_p = false;
1946       lowered = true;
1947       bitmap_obstack_release (NULL);
1948     }
1949   current_function_decl = NULL;
1950   set_cfun (NULL);
1951   return true;
1952 }
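
/* For illustration (names below are made up): the GIMPLE body built by the
   generic path above is roughly

     <thunk> (this, args...)
     {
       adjusted_this = this + FIXED_OFFSET;	(thunk_adjust)
       retval = <alias> (adjusted_this, args...);
       return retval;
     }

   i.e. one basic block that adjusts `this', forwards the remaining
   arguments unchanged and returns the callee's result.  For a
   return-adjusting thunk the adjustment is applied to RESTMP instead,
   guarded by the NULL check that the then_bb/else_bb/return_bb
   construction implements.  */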
1953 
1954 /* Assemble thunks and aliases associated to node.  */
1955 
1956 void
1957 cgraph_node::assemble_thunks_and_aliases (void)
1958 {
1959   cgraph_edge *e;
1960   ipa_ref *ref;
1961 
1962   for (e = callers; e;)
1963     if (e->caller->thunk.thunk_p
1964 	&& !e->caller->global.inlined_to
1965 	&& !e->caller->thunk.add_pointer_bounds_args)
1966       {
1967 	cgraph_node *thunk = e->caller;
1968 
1969 	e = e->next_caller;
1970 	thunk->expand_thunk (true, false);
1971 	thunk->assemble_thunks_and_aliases ();
1972       }
1973     else
1974       e = e->next_caller;
1975 
1976   FOR_EACH_ALIAS (this, ref)
1977     {
1978       cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1979       if (!alias->transparent_alias)
1980 	{
1981 	  bool saved_written = TREE_ASM_WRITTEN (decl);
1982 
1983 	  /* Force assemble_alias to really output the alias this time instead
1984 	     of buffering it in same alias pairs.  */
1985 	  TREE_ASM_WRITTEN (decl) = 1;
1986 	  do_assemble_alias (alias->decl,
1987 			     DECL_ASSEMBLER_NAME (decl));
1988 	  alias->assemble_thunks_and_aliases ();
1989 	  TREE_ASM_WRITTEN (decl) = saved_written;
1990 	}
1991     }
1992 }
1993 
1994 /* Expand function specified by node.  */
1995 
1996 void
1997 cgraph_node::expand (void)
1998 {
1999   location_t saved_loc;
2000 
2001   /* We ought not to compile any inline clones.  */
2002   gcc_assert (!global.inlined_to);
2003 
2004   /* __RTL functions are compiled as soon as they are parsed, so don't
2005      do it again.  */
2006   if (native_rtl_p ())
2007     return;
2008 
2009   announce_function (decl);
2010   process = 0;
2011   gcc_assert (lowered);
2012   get_untransformed_body ();
2013 
2014   /* Generate RTL for the body of DECL.  */
2015 
2016   timevar_push (TV_REST_OF_COMPILATION);
2017 
2018   gcc_assert (symtab->global_info_ready);
2019 
2020   /* Initialize the default bitmap obstack.  */
2021   bitmap_obstack_initialize (NULL);
2022 
2023   /* Initialize the RTL code for the function.  */
2024   saved_loc = input_location;
2025   input_location = DECL_SOURCE_LOCATION (decl);
2026 
2027   gcc_assert (DECL_STRUCT_FUNCTION (decl));
2028   push_cfun (DECL_STRUCT_FUNCTION (decl));
2029   init_function_start (decl);
2030 
2031   gimple_register_cfg_hooks ();
2032 
2033   bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
2034 
2035   execute_all_ipa_transforms ();
2036 
2037   /* Perform all tree transforms and optimizations.  */
2038 
2039   /* Signal the start of passes.  */
2040   invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);
2041 
2042   execute_pass_list (cfun, g->get_passes ()->all_passes);
2043 
2044   /* Signal the end of passes.  */
2045   invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);
2046 
2047   bitmap_obstack_release (&reg_obstack);
2048 
2049   /* Release the default bitmap obstack.  */
2050   bitmap_obstack_release (NULL);
2051 
2052   /* If requested, warn about function definitions where the function will
2053      return a value (usually of some struct or union type) which itself will
2054      take up a lot of stack space.  */
2055   if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
2056     {
2057       tree ret_type = TREE_TYPE (TREE_TYPE (decl));
2058 
2059       if (ret_type && TYPE_SIZE_UNIT (ret_type)
2060 	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
2061 	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
2062 				   larger_than_size))
2063 	{
2064 	  unsigned int size_as_int
2065 	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));
2066 
2067 	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
2068 	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
2069                      decl, size_as_int);
2070 	  else
2071 	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
2072                      decl, larger_than_size);
2073 	}
2074     }
2075 
2076   gimple_set_body (decl, NULL);
2077   if (DECL_STRUCT_FUNCTION (decl) == 0
2078       && !cgraph_node::get (decl)->origin)
2079     {
2080       /* Stop pointing to the local nodes about to be freed.
2081 	 But DECL_INITIAL must remain nonzero so we know this
2082 	 was an actual function definition.
2083 	 For a nested function, this is done in c_pop_function_context.
2084 	 If rest_of_compilation set this to 0, leave it 0.  */
2085       if (DECL_INITIAL (decl) != 0)
2086 	DECL_INITIAL (decl) = error_mark_node;
2087     }
2088 
2089   input_location = saved_loc;
2090 
2091   ggc_collect ();
2092   timevar_pop (TV_REST_OF_COMPILATION);
2093 
2094   /* Make sure that the back end didn't give up on compiling.  */
2095   gcc_assert (TREE_ASM_WRITTEN (decl));
2096   if (cfun)
2097     pop_cfun ();
2098 
2099   /* It would make a lot more sense to output thunks before the function body
2100      to get more forward and fewer backward jumps.  This however would require
2101      solving a problem with comdats; see PR48668.  Also aliases must come after
2102      the function itself to keep one-pass assemblers, like the one on AIX, happy;
2103      see PR 50689.  FIXME: Perhaps thunks should be moved before the function
2104      iff they are not in comdat groups.  */
2105   assemble_thunks_and_aliases ();
2106   release_body ();
2107   /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
2108      points to the dead function body.  */
2109   remove_callees ();
2110   remove_all_references ();
2111 }
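
/* For illustration (the struct below is hypothetical): the -Wlarger-than=
   check above warns when a definition returns a value bigger than the
   limit.  E.g. with -Wlarger-than=65536

     struct big { char buf[1 << 20]; };
     struct big make_big (void) { struct big b; return b; }

   produces "size of return value of 'make_big' is 1048576 bytes"; the
   second, "larger than" wording is used when the size does not fit in an
   unsigned int.  */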
2112 
2113 /* Node comparator used to order functions by the time they were
2114    first executed (tp_first_run).  */
2115 
2116 static int
2117 node_cmp (const void *pa, const void *pb)
2118 {
2119   const cgraph_node *a = *(const cgraph_node * const *) pa;
2120   const cgraph_node *b = *(const cgraph_node * const *) pb;
2121 
2122   /* Functions with a time profile must come before those without one.  */
2123   if (!a->tp_first_run || !b->tp_first_run)
2124     return a->tp_first_run - b->tp_first_run;
2125 
2126   return a->tp_first_run != b->tp_first_run
2127 	 ? b->tp_first_run - a->tp_first_run
2128 	 : b->order - a->order;
2129 }
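
/* For illustration (the nodes below are hypothetical): combined with the
   backward walk in expand_all_functions below, node_cmp makes profiled
   functions come out first, in increasing tp_first_run order.  E.g. for

     f1 (tp_first_run = 7), f2 (tp_first_run = 3), f3 (tp_first_run = 0)

   qsort orders the array as f3, f1, f2, and the reverse iteration then
   expands f2, f1, f3, so the functions executed earliest during training
   are emitted first and the unprofiled f3 comes last.  */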
2130 
2131 /* Expand all functions that must be output.
2132 
2133    Attempt to topologically sort the nodes so that a function is output when
2134    all of the functions it calls have already been assembled, allowing data to
2135    be propagated across the callgraph.  Use a stack to get a smaller distance
2136    between a function and its callees (later we may choose to use a more
2137    sophisticated algorithm for function reordering; we will likely want
2138    to use subsections to make the output functions appear in top-down
2139    order).  */
2140 
2141 static void
2142 expand_all_functions (void)
2143 {
2144   cgraph_node *node;
2145   cgraph_node **order = XCNEWVEC (cgraph_node *,
2146 					 symtab->cgraph_count);
2147   unsigned int expanded_func_count = 0, profiled_func_count = 0;
2148   int order_pos, new_order_pos = 0;
2149   int i;
2150 
2151   order_pos = ipa_reverse_postorder (order);
2152   gcc_assert (order_pos == symtab->cgraph_count);
2153 
2154   /* The garbage collector may remove inline clones that we eliminate during
2155      optimization, so we must be sure not to reference them.  */
2156   for (i = 0; i < order_pos; i++)
2157     if (order[i]->process)
2158       order[new_order_pos++] = order[i];
2159 
2160   if (flag_profile_reorder_functions)
2161     qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
2162 
2163   for (i = new_order_pos - 1; i >= 0; i--)
2164     {
2165       node = order[i];
2166 
2167       if (node->process)
2168 	{
2169 	  expanded_func_count++;
2170 	  if (node->tp_first_run)
2171 	    profiled_func_count++;
2172 
2173 	  if (symtab->dump_file)
2174 	    fprintf (symtab->dump_file,
2175 		     "Time profile order in expand_all_functions:%s:%d\n",
2176 		     node->asm_name (), node->tp_first_run);
2177 	  node->process = 0;
2178 	  node->expand ();
2179 	}
2180     }
2181 
2182   if (dump_file)
2183     fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2184 	     main_input_filename, profiled_func_count, expanded_func_count);
2185 
2186   if (symtab->dump_file && flag_profile_reorder_functions)
2187     fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2188              profiled_func_count, expanded_func_count);
2189 
2190   symtab->process_new_functions ();
2191   free_gimplify_stack ();
2192 
2193   free (order);
2194 }
2195 
2196 /* This is used to sort the node types by the cgraph order number.  */
2197 
2198 enum cgraph_order_sort_kind
2199 {
2200   ORDER_UNDEFINED = 0,
2201   ORDER_FUNCTION,
2202   ORDER_VAR,
2203   ORDER_VAR_UNDEF,
2204   ORDER_ASM
2205 };
2206 
2207 struct cgraph_order_sort
2208 {
2209   enum cgraph_order_sort_kind kind;
2210   union
2211   {
2212     cgraph_node *f;
2213     varpool_node *v;
2214     asm_node *a;
2215   } u;
2216 };
2217 
2218 /* Output all functions, variables, and asm statements in the order
2219    given by their order fields, which is the order in which they
2220    appeared in the file.  This implements -fno-toplevel-reorder.  In
2221    this mode we may output functions and variables which don't really
2222    need to be output.
2223    When NO_REORDER is true, only do this for symbols marked no_reorder.  */
2224 
2225 static void
2226 output_in_order (bool no_reorder)
2227 {
2228   int max;
2229   cgraph_order_sort *nodes;
2230   int i;
2231   cgraph_node *pf;
2232   varpool_node *pv;
2233   asm_node *pa;
2234   max = symtab->order;
2235   nodes = XCNEWVEC (cgraph_order_sort, max);
2236 
2237   FOR_EACH_DEFINED_FUNCTION (pf)
2238     {
2239       if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2240 	{
2241 	  if (no_reorder && !pf->no_reorder)
2242 	    continue;
2243 	  i = pf->order;
2244 	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2245 	  nodes[i].kind = ORDER_FUNCTION;
2246 	  nodes[i].u.f = pf;
2247 	}
2248     }
2249 
2250   /* There is a similar loop in symbol_table::output_variables.
2251      Please keep them in sync.  */
2252   FOR_EACH_VARIABLE (pv)
2253     {
2254       if (no_reorder && !pv->no_reorder)
2255 	continue;
2256       if (DECL_HARD_REGISTER (pv->decl)
2257 	  || DECL_HAS_VALUE_EXPR_P (pv->decl))
2258 	continue;
2259       i = pv->order;
2260       gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2261       nodes[i].kind = pv->definition ? ORDER_VAR : ORDER_VAR_UNDEF;
2262       nodes[i].u.v = pv;
2263     }
2264 
2265   for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2266     {
2267       i = pa->order;
2268       gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2269       nodes[i].kind = ORDER_ASM;
2270       nodes[i].u.a = pa;
2271     }
2272 
2273   /* In toplevel reorder mode we output all statics; mark them as needed.  */
2274 
2275   for (i = 0; i < max; ++i)
2276     if (nodes[i].kind == ORDER_VAR)
2277       nodes[i].u.v->finalize_named_section_flags ();
2278 
2279   for (i = 0; i < max; ++i)
2280     {
2281       switch (nodes[i].kind)
2282 	{
2283 	case ORDER_FUNCTION:
2284 	  nodes[i].u.f->process = 0;
2285 	  nodes[i].u.f->expand ();
2286 	  break;
2287 
2288 	case ORDER_VAR:
2289 	  nodes[i].u.v->assemble_decl ();
2290 	  break;
2291 
2292 	case ORDER_VAR_UNDEF:
2293 	  assemble_undefined_decl (nodes[i].u.v->decl);
2294 	  break;
2295 
2296 	case ORDER_ASM:
2297 	  assemble_asm (nodes[i].u.a->asm_str);
2298 	  break;
2299 
2300 	case ORDER_UNDEFINED:
2301 	  break;
2302 
2303 	default:
2304 	  gcc_unreachable ();
2305 	}
2306     }
2307 
2308   symtab->clear_asm_symbols ();
2309 
2310   free (nodes);
2311 }
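
/* For illustration (the unit below is hypothetical): with
   -fno-toplevel-reorder the output for

     int x = 1;
     asm ("# marker");
     int f (void) { return x; }
     int y = 2;

   keeps exactly this order (x, the toplevel asm, f, y), because each symbol
   is placed into NODES[] at its original ->order slot above.  Under the
   default reordering the compiler is free to group variables and functions
   differently.  */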
2312 
2313 static void
2314 ipa_passes (void)
2315 {
2316   gcc::pass_manager *passes = g->get_passes ();
2317 
2318   set_cfun (NULL);
2319   current_function_decl = NULL;
2320   gimple_register_cfg_hooks ();
2321   bitmap_obstack_initialize (NULL);
2322 
2323   invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
2324 
2325   if (!in_lto_p)
2326     {
2327       execute_ipa_pass_list (passes->all_small_ipa_passes);
2328       if (seen_error ())
2329 	return;
2330     }
2331 
2332   /* This extra symtab_remove_unreachable_nodes pass tends to catch cases
2333      where devirtualization and other changes let removal iterate further.  */
2334   symtab->remove_unreachable_nodes (symtab->dump_file);
2335 
2336   /* If pass_all_early_optimizations was not scheduled, the state of
2337      the cgraph will not be properly updated.  Update it now.  */
2338   if (symtab->state < IPA_SSA)
2339     symtab->state = IPA_SSA;
2340 
2341   if (!in_lto_p)
2342     {
2343       /* Generate coverage variables and constructors.  */
2344       coverage_finish ();
2345 
2346       /* Process new functions added.  */
2347       set_cfun (NULL);
2348       current_function_decl = NULL;
2349       symtab->process_new_functions ();
2350 
2351       execute_ipa_summary_passes
2352 	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
2353     }
2354 
2355   /* Some targets need to handle LTO assembler output specially.  */
2356   if (flag_generate_lto || flag_generate_offload)
2357     targetm.asm_out.lto_start ();
2358 
2359   if (!in_lto_p)
2360     {
2361       if (g->have_offload)
2362 	{
2363 	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
2364 	  lto_stream_offload_p = true;
2365 	  ipa_write_summaries ();
2366 	  lto_stream_offload_p = false;
2367 	}
2368       if (flag_lto)
2369 	{
2370 	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
2371 	  lto_stream_offload_p = false;
2372 	  ipa_write_summaries ();
2373 	}
2374     }
2375 
2376   if (flag_generate_lto || flag_generate_offload)
2377     targetm.asm_out.lto_end ();
2378 
2379   if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
2380     execute_ipa_pass_list (passes->all_regular_ipa_passes);
2381   invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
2382 
2383   bitmap_obstack_release (NULL);
2384 }
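
/* For illustration (a sketch of the flag interaction, not an exhaustive
   description): the guard before all_regular_ipa_passes above means that a
   plain `gcc -O2 -flto -c f.c' (without -ffat-lto-objects) does not run the
   regular IPA passes at compile time; only their summaries are streamed
   into the object file and the real IPA work happens at link time.  With
   -ffat-lto-objects, or when compiling without -flto, the regular IPA
   passes run here as well.  */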
2385 
2386 
2387 /* Return, as an identifier, the name that DECL is declared an alias of.  */
2388 
2389 static tree
2390 get_alias_symbol (tree decl)
2391 {
2392   tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2393   return get_identifier (TREE_STRING_POINTER
2394 			  (TREE_VALUE (TREE_VALUE (alias))));
2395 }
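
/* For illustration (identifiers are hypothetical): get_alias_symbol digs
   the target name out of an attribute such as

     void real_impl (void) { }
     void entry (void) __attribute__ ((alias ("real_impl")));

   where the "alias" attribute on `entry' carries the string "real_impl";
   the nested TREE_VALUEs above extract that string and return it as an
   identifier.  */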
2396 
2397 
2398 /* Weakrefs may be associated with external decls and thus not output
2399    at expansion time.  Emit all necessary aliases.  */
2400 
2401 void
2402 symbol_table::output_weakrefs (void)
2403 {
2404   symtab_node *node;
2405   cgraph_node *cnode;
2406   FOR_EACH_SYMBOL (node)
2407     if (node->alias
2408         && !TREE_ASM_WRITTEN (node->decl)
2409 	&& (!(cnode = dyn_cast <cgraph_node *> (node))
2410 	    || !cnode->instrumented_version
2411 	    || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
2412 	&& node->weakref)
2413       {
2414 	tree target;
2415 
2416 	/* Weakrefs are special in that they do not require the target to be
2417 	   defined in the current compilation unit, so it is a bit hard to work
2418 	   out what we want to alias.
2419 	   When the alias target is defined, we fetch it from the symtab reference;
2420 	   otherwise it is pointed to by alias_target.  */
2421 	if (node->alias_target)
2422 	  target = (DECL_P (node->alias_target)
2423 		    ? DECL_ASSEMBLER_NAME (node->alias_target)
2424 		    : node->alias_target);
2425 	else if (node->analyzed)
2426 	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
2427 	else
2428 	  {
2429 	    gcc_unreachable ();
2430 	    target = get_alias_symbol (node->decl);
2431 	  }
2432         do_assemble_alias (node->decl, target);
2433       }
2434 }
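
/* For illustration (names are hypothetical): a weakref such as

     static void helper (void) __attribute__ ((weakref ("ext_target")));

   does not require ext_target to be defined in this unit.  When the target
   is not defined here, its name is taken from alias_target as handled
   above; when it is defined (and the node analyzed), the assembler name of
   the resolved alias target is used instead, and do_assemble_alias then
   emits the reference.  */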
2435 
2436 /* Perform simple optimizations based on callgraph.  */
2437 
2438 void
2439 symbol_table::compile (void)
2440 {
2441   if (seen_error ())
2442     return;
2443 
2444   symtab_node::checking_verify_symtab_nodes ();
2445 
2446   timevar_push (TV_CGRAPHOPT);
2447   if (pre_ipa_mem_report)
2448     {
2449       fprintf (stderr, "Memory consumption before IPA\n");
2450       dump_memory_report (false);
2451     }
2452   if (!quiet_flag)
2453     fprintf (stderr, "Performing interprocedural optimizations\n");
2454   state = IPA;
2455 
2456   /* Offloading requires LTO infrastructure.  */
2457   if (!in_lto_p && g->have_offload)
2458     flag_generate_offload = 1;
2459 
2460   /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
2461   if (flag_generate_lto || flag_generate_offload)
2462     lto_streamer_hooks_init ();
2463 
2464   /* Don't run the IPA passes if there was any error or sorry messages.  */
2465   if (!seen_error ())
2466     ipa_passes ();
2467 
2468   /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
2469   if (seen_error ()
2470       || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
2471     {
2472       timevar_pop (TV_CGRAPHOPT);
2473       return;
2474     }
2475 
2476   global_info_ready = true;
2477   if (dump_file)
2478     {
2479       fprintf (dump_file, "Optimized ");
2480       symtab_node::dump_table (dump_file);
2481     }
2482   if (post_ipa_mem_report)
2483     {
2484       fprintf (stderr, "Memory consumption after IPA\n");
2485       dump_memory_report (false);
2486     }
2487   timevar_pop (TV_CGRAPHOPT);
2488 
2489   /* Output everything.  */
2490   switch_to_section (text_section);
2491   (*debug_hooks->assembly_start) ();
2492   if (!quiet_flag)
2493     fprintf (stderr, "Assembling functions:\n");
2494   symtab_node::checking_verify_symtab_nodes ();
2495 
2496   bitmap_obstack_initialize (NULL);
2497   execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
2498   bitmap_obstack_release (NULL);
2499   mark_functions_to_output ();
2500 
2501   /* When weakref support is missing, we automatically translate all
2502      references to NODE to references to its ultimate alias target.
2503      The renaming mechanism uses the flag IDENTIFIER_TRANSPARENT_ALIAS and
2504      TREE_CHAIN.
2505 
2506      Set up this mapping before we output any assembler, but only once we are
2507      sure that all symbol renaming is done.
2508 
2509      FIXME: All this ugliness can go away if we just do renaming at the GIMPLE
2510      level by physically rewriting the IL.  At the moment we can only redirect
2511      calls, so we need infrastructure for renaming references as well.  */
2512 #ifndef ASM_OUTPUT_WEAKREF
2513   symtab_node *node;
2514 
2515   FOR_EACH_SYMBOL (node)
2516     if (node->alias
2517 	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
2518       {
2519 	IDENTIFIER_TRANSPARENT_ALIAS
2520 	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
2521 	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
2522 	   = (node->alias_target ? node->alias_target
2523 	      : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
2524       }
2525 #endif
2526 
2527   state = EXPANSION;
2528 
2529   if (!flag_toplevel_reorder)
2530     output_in_order (false);
2531   else
2532     {
2533       /* First output asm statements and anything marked no_reorder.  The
2534          process flag is cleared for these nodes, so we skip them later.  */
2535       output_in_order (true);
2536       expand_all_functions ();
2537       output_variables ();
2538     }
2539 
2540   process_new_functions ();
2541   state = FINISHED;
2542   output_weakrefs ();
2543 
2544   if (dump_file)
2545     {
2546       fprintf (dump_file, "\nFinal ");
2547       symtab_node::dump_table (dump_file);
2548     }
2549   if (!flag_checking)
2550     return;
2551   symtab_node::verify_symtab_nodes ();
2552   /* Double check that all inline clones are gone and that all
2553      function bodies have been released from memory.  */
2554   if (!seen_error ())
2555     {
2556       cgraph_node *node;
2557       bool error_found = false;
2558 
2559       FOR_EACH_DEFINED_FUNCTION (node)
2560 	if (node->global.inlined_to
2561 	    || gimple_has_body_p (node->decl))
2562 	  {
2563 	    error_found = true;
2564 	    node->debug ();
2565 	  }
2566       if (error_found)
2567 	internal_error ("nodes with unreleased memory found");
2568     }
2569 }
2570 
2571 
2572 /* Analyze the whole compilation unit once it is parsed completely.  */
2573 
2574 void
2575 symbol_table::finalize_compilation_unit (void)
2576 {
2577   timevar_push (TV_CGRAPH);
2578 
2579   /* If we're here there's no current function anymore.  Some frontends
2580      are lazy in clearing these.  */
2581   current_function_decl = NULL;
2582   set_cfun (NULL);
2583 
2584   /* Do not skip analyzing the functions if there were errors; otherwise we
2585      would miss diagnostics for the following functions.  */
2586 
2587   /* Emit size functions we didn't inline.  */
2588   finalize_size_functions ();
2589 
2590   /* Mark alias targets necessary and emit diagnostics.  */
2591   handle_alias_pairs ();
2592 
2593   if (!quiet_flag)
2594     {
2595       fprintf (stderr, "\nAnalyzing compilation unit\n");
2596       fflush (stderr);
2597     }
2598 
2599   if (flag_dump_passes)
2600     dump_passes ();
2601 
2602   /* Gimplify and lower all functions, compute reachability and
2603      remove unreachable nodes.  */
2604   analyze_functions (/*first_time=*/true);
2605 
2606   /* Mark alias targets necessary and emit diagnostics.  */
2607   handle_alias_pairs ();
2608 
2609   /* Gimplify and lower thunks.  */
2610   analyze_functions (/*first_time=*/false);
2611 
2612   if (!seen_error ())
2613     {
2614       /* Emit early debug for reachable functions, and by consequence,
2615 	 locally scoped symbols.  */
2616       struct cgraph_node *cnode;
2617       FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (cnode)
2618 	(*debug_hooks->early_global_decl) (cnode->decl);
2619 
2620       /* Clean up anything that needs cleaning up after initial debug
2621 	 generation.  */
2622       (*debug_hooks->early_finish) (main_input_filename);
2623     }
2624 
2625   /* Finally drive the pass manager.  */
2626   compile ();
2627 
2628   timevar_pop (TV_CGRAPH);
2629 }
2630 
2631 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2632    within the same process.  For use by toplev::finalize.  */
2633 
2634 void
2635 cgraphunit_c_finalize (void)
2636 {
2637   gcc_assert (cgraph_new_nodes.length () == 0);
2638   cgraph_new_nodes.truncate (0);
2639 
2640   vtable_entry_type = NULL;
2641   queued_nodes = &symtab_terminator;
2642 
2643   first_analyzed = NULL;
2644   first_analyzed_var = NULL;
2645 }
2646 
2647 /* Create a wrapper of this cgraph_node that forwards to the TARGET node.
2648    A thunk is used to implement this kind of wrapper method.  */
2649 
2650 void
2651 cgraph_node::create_wrapper (cgraph_node *target)
2652 {
2653   /* Preserve DECL_RESULT so we get the right by-reference flag.  */
2654   tree decl_result = DECL_RESULT (decl);
2655 
2656   /* Remove the function's body but keep its arguments to be reused
2657      for the thunk.  */
2658   release_body (true);
2659   reset ();
2660 
2661   DECL_UNINLINABLE (decl) = false;
2662   DECL_RESULT (decl) = decl_result;
2663   DECL_INITIAL (decl) = NULL;
2664   allocate_struct_function (decl, false);
2665   set_cfun (NULL);
2666 
2667   /* Turn alias into thunk and expand it into GIMPLE representation.  */
2668   definition = true;
2669 
2670   memset (&thunk, 0, sizeof (cgraph_thunk_info));
2671   thunk.thunk_p = true;
2672   create_edge (target, NULL, count, CGRAPH_FREQ_BASE);
2673   callees->can_throw_external = !TREE_NOTHROW (target->decl);
2674 
2675   tree arguments = DECL_ARGUMENTS (decl);
2676 
2677   while (arguments)
2678     {
2679       TREE_ADDRESSABLE (arguments) = false;
2680       arguments = TREE_CHAIN (arguments);
2681     }
2682 
2683   expand_thunk (false, true);
2684 
2685   /* Inline summary set-up.  */
2686   analyze ();
2687   inline_analyze_function (this);
2688 }
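
/* For illustration (a conceptual sketch, not generated code): create_wrapper
   turns this node into a trivial forwarding thunk of TARGET, roughly

     ret_type this_node (args...) { return target (args...); }

   i.e. a thunk with no `this' adjustment (cgraph_thunk_info is zeroed above,
   so fixed_offset and virtual_value are 0) that is expanded straight to
   GIMPLE by expand_thunk (false, true).  It is used, for example, by
   identical-code folding when two function bodies are merged but a real
   assembler alias cannot be emitted.  */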
2689 
2690 #include "gt-cgraphunit.h"
2691