xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cgraphunit.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* Driver of optimization process
2    Copyright (C) 2003-2015 Free Software Foundation, Inc.
3    Contributed by Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 /* This module implements main driver of compilation process.
22 
23    The main scope of this file is to act as an interface in between
24    tree based frontends and the backend.
25 
26    The front-end is supposed to use following functionality:
27 
28     - finalize_function
29 
30       This function is called once front-end has parsed whole body of function
31       and it is certain that neither the function body nor the declaration will change.
32 
33       (There is one exception needed for implementing GCC extern inline
34 	function.)
35 
36     - varpool_finalize_decl
37 
38       This function has same behavior as the above but is used for static
39       variables.
40 
41     - add_asm_node
42 
43       Insert new toplevel ASM statement
44 
45     - finalize_compilation_unit
46 
47       This function is called once (source level) compilation unit is finalized
48       and it will no longer change.
49 
50       The symbol table is constructed starting from the trivially needed
51       symbols finalized by the frontend.  Functions are lowered into
52       GIMPLE representation and callgraph/reference lists are constructed.
53       Those are used to discover other necessary functions and variables.
54 
55       At the end the bodies of unreachable functions are removed.
56 
57       The function can be called multiple times when multiple source level
58       compilation units are combined.
59 
60     - compile
61 
62       This passes control to the back-end.  Optimizations are performed and
63       final assembler is generated.  This is done in the following way. Note
64       that with link time optimization the process is split into three
65       stages (compile time, linktime analysis and parallel linktime as
66       indicated below).
67 
68       Compile time:
69 
70 	1) Inter-procedural optimization.
71 	   (ipa_passes)
72 
73 	   This part is further split into:
74 
75 	   a) early optimizations. These are local passes executed in
76 	      the topological order on the callgraph.
77 
78 	      The purpose of early optimizations is to optimize away simple
79 	      things that may otherwise confuse IP analysis. Very simple
80 	      propagation across the callgraph is done i.e. to discover
81 	      functions without side effects and simple inlining is performed.
82 
83 	   b) early small interprocedural passes.
84 
85 	      Those are interprocedural passes executed only at compilation
86       time.  These include, for example, transactional memory lowering,
87 	      unreachable code removal and other simple transformations.
88 
89 	   c) IP analysis stage.  All interprocedural passes do their
90 	      analysis.
91 
92 	      Interprocedural passes differ from small interprocedural
93 	      passes by their ability to operate across whole program
94 	      at linktime.  Their analysis stage is performed early to
95 	      both reduce linking times and linktime memory usage by
96 	      not having to represent whole program in memory.
97 
98 	   d) LTO streaming.  When doing LTO, everything important gets
99 	      streamed into the object file.
100 
101        Compile time and or linktime analysis stage (WPA):
102 
103 	      At linktime units gets streamed back and symbol table is
104 	      merged.  Function bodies are not streamed in and not
105 	      available.
106 	   e) IP propagation stage.  All IP passes execute their
107 	      IP propagation. This is done based on the earlier analysis
108 	      without having function bodies at hand.
109 	   f) Ltrans streaming.  When doing WHOPR LTO, the program
110 	      is partitioned and streamed into multiple object files.
111 
112        Compile time and/or parallel linktime stage (ltrans)
113 
114 	      Each of the object files is streamed back and compiled
115 	      separately.  Now the function bodies becomes available
116 	      again.
117 
118 	 2) Virtual clone materialization
119 	    (cgraph_materialize_clone)
120 
121 	    IP passes can produce copies of existing functions (such
122 	    as versioned clones or inline clones) without actually
123 	    manipulating their bodies by creating virtual clones in
124 	    the callgraph. At this time the virtual clones are
125 	    turned into real functions
126 	 3) IP transformation
127 
128 	    All IP passes transform function bodies based on earlier
129 	    decision of the IP propagation.
130 
131 	 4) late small IP passes
132 
133 	    Simple IP passes working within single program partition.
134 
135 	 5) Expansion
136 	    (expand_all_functions)
137 
138 	    At this stage functions that needs to be output into
139 	    assembler are identified and compiled in topological order
140 	 6) Output of variables and aliases
141 	    Now it is known which variable references were not optimized
142 	    out and thus all variables are output to the file.
143 
144 	    Note that with -fno-toplevel-reorder passes 5 and 6
145 	    are combined together in cgraph_output_in_order.
146 
147    Finally there are functions to manipulate the callgraph from
148    backend.
149     - cgraph_add_new_function is used to add backend produced
150       functions introduced after the unit is finalized.
151       The functions are enqueued for later processing and inserted
152       into callgraph with cgraph_process_new_functions.
153 
154     - cgraph_function_versioning
155 
156       produces a copy of function into new one (a version)
157       and apply simple transformations
158 */
159 
160 #include "config.h"
161 #include "system.h"
162 #include "coretypes.h"
163 #include "tm.h"
164 #include "hash-set.h"
165 #include "machmode.h"
166 #include "vec.h"
167 #include "double-int.h"
168 #include "input.h"
169 #include "alias.h"
170 #include "symtab.h"
171 #include "wide-int.h"
172 #include "inchash.h"
173 #include "tree.h"
174 #include "fold-const.h"
175 #include "varasm.h"
176 #include "stor-layout.h"
177 #include "stringpool.h"
178 #include "output.h"
179 #include "rtl.h"
180 #include "predict.h"
181 #include "hard-reg-set.h"
182 #include "input.h"
183 #include "function.h"
184 #include "basic-block.h"
185 #include "tree-ssa-alias.h"
186 #include "internal-fn.h"
187 #include "gimple-fold.h"
188 #include "gimple-expr.h"
189 #include "is-a.h"
190 #include "gimple.h"
191 #include "gimplify.h"
192 #include "gimple-iterator.h"
193 #include "gimplify-me.h"
194 #include "gimple-ssa.h"
195 #include "tree-cfg.h"
196 #include "tree-into-ssa.h"
197 #include "tree-ssa.h"
198 #include "tree-inline.h"
199 #include "langhooks.h"
200 #include "toplev.h"
201 #include "flags.h"
202 #include "debug.h"
203 #include "target.h"
204 #include "diagnostic.h"
205 #include "params.h"
206 #include "intl.h"
207 #include "hash-map.h"
208 #include "plugin-api.h"
209 #include "ipa-ref.h"
210 #include "cgraph.h"
211 #include "alloc-pool.h"
212 #include "symbol-summary.h"
213 #include "ipa-prop.h"
214 #include "tree-iterator.h"
215 #include "tree-pass.h"
216 #include "tree-dump.h"
217 #include "gimple-pretty-print.h"
218 #include "output.h"
219 #include "coverage.h"
220 #include "plugin.h"
221 #include "ipa-inline.h"
222 #include "ipa-utils.h"
223 #include "lto-streamer.h"
224 #include "except.h"
225 #include "cfgloop.h"
226 #include "regset.h"     /* FIXME: For reg_obstack.  */
227 #include "context.h"
228 #include "pass_manager.h"
229 #include "tree-nested.h"
230 #include "gimplify.h"
231 #include "dbgcnt.h"
232 #include "tree-chkp.h"
233 #include "lto-section-names.h"
234 #include "omp-low.h"
235 #include "print-tree.h"
236 
237 /* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
238    secondary queue used during optimization to accommodate passes that
239    may generate new functions that need to be optimized and expanded.  */
240 vec<cgraph_node *> cgraph_new_nodes;
241 
242 static void expand_all_functions (void);
243 static void mark_functions_to_output (void);
244 static void handle_alias_pairs (void);
245 
246 /* Used for vtable lookup in thunk adjusting.  */
247 static GTY (()) tree vtable_entry_type;
248 
249 /* Determine if symbol declaration is needed.  That is, visible to something
250    either outside this translation unit, something magic in the system
251    configury */
252 bool
253 symtab_node::needed_p (void)
254 {
255   /* Double check that no one output the function into assembly file
256      early.  */
257   gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
258 	               || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
259 
260   if (!definition)
261     return false;
262 
263   if (DECL_EXTERNAL (decl))
264     return false;
265 
266   /* If the user told us it is used, then it must be so.  */
267   if (force_output)
268     return true;
269 
270   /* ABI forced symbols are needed when they are external.  */
271   if (forced_by_abi && TREE_PUBLIC (decl))
272     return true;
273 
274   /* Keep constructors, destructors and virtual functions.  */
275    if (TREE_CODE (decl) == FUNCTION_DECL
276        && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
277     return true;
278 
279   /* Externally visible variables must be output.  The exception is
280      COMDAT variables that must be output only when they are needed.  */
281   if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
282     return true;
283 
284   return false;
285 }
286 
287 /* Head and terminator of the queue of nodes to be processed while building
288    callgraph.  */
289 
290 static symtab_node symtab_terminator;
291 static symtab_node *queued_nodes = &symtab_terminator;
292 
/* Add NODE to the queue of nodes to be processed, rooted at QUEUED_NODES.
   The queue is singly linked through the nodes' AUX pointers and is
   terminated by a pointer to the SYMTAB_TERMINATOR sentinel, so a queued
   node always has a non-NULL AUX.  */

static void
enqueue_node (symtab_node *node)
{
  /* A non-NULL AUX means NODE is already on the queue; never enqueue
     a node twice.  */
  if (node->aux)
    return;
  gcc_checking_assert (queued_nodes);
  /* Push NODE at the head of the list.  */
  node->aux = queued_nodes;
  queued_nodes = node;
}
305 
/* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
   functions into callgraph in a way so they look like ordinary reachable
   functions inserted into callgraph already at construction time.  The
   amount of catch-up work done for each node depends on how far the
   compilation pipeline has already progressed (the symbol table STATE).  */

void
symbol_table::process_new_functions (void)
{
  tree fndecl;

  if (!cgraph_new_nodes.exists ())
    return;

  handle_alias_pairs ();
  /*  Note that this queue may grow as its being processed, as the new
      functions may generate new ones.  Therefore iterate by index and
      re-read the length on every iteration instead of caching it.  */
  for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
    {
      cgraph_node *node = cgraph_new_nodes[i];
      fndecl = node->decl;
      switch (state)
	{
	case CONSTRUCTION:
	  /* At construction time we just need to finalize function and move
	     it into reachable functions list.  */

	  cgraph_node::finalize_function (fndecl, false);
	  call_cgraph_insertion_hooks (node);
	  enqueue_node (node);
	  break;

	case IPA:
	case IPA_SSA:
	case IPA_SSA_AFTER_INLINING:
	  /* When IPA optimization already started, do all essential
	     transformations that has been already performed on the whole
	     cgraph but not on this function.  */

	  gimple_register_cfg_hooks ();
	  if (!node->analyzed)
	    node->analyze ();
	  push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	  /* Bring the body into SSA form if the rest of the unit already
	     is in SSA; otherwise just (re)compute the inline summary.  */
	  if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
	      && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	    g->get_passes ()->execute_early_local_passes ();
	  else if (inline_summaries != NULL)
	    compute_inline_parameters (node, true);
	  /* Dominance info computed by the passes above is not kept up to
	     date past this point; free it to avoid stale data.  */
	  free_dominance_info (CDI_POST_DOMINATORS);
	  free_dominance_info (CDI_DOMINATORS);
	  pop_cfun ();
	  call_cgraph_insertion_hooks (node);
	  break;

	case EXPANSION:
	  /* Functions created during expansion shall be compiled
	     directly.  */
	  node->process = 0;
	  call_cgraph_insertion_hooks (node);
	  node->expand ();
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
    }

  /* All queued nodes have been processed; drop the queue.  */
  cgraph_new_nodes.release ();
}
374 
/* As a GCC extension we allow redefinition of the function.  The
   semantics when both copies of bodies differ is not well defined.
   We replace the old body with new body so in unit at a time mode
   we always use new body, while in normal mode we may end up with
   old body inlined into some functions and new body expanded and
   inlined in others.

   ??? It may make more sense to use one body for inlining and other
   body for expanding the function but this is difficult to do.

   Reset this node back to the state it had right after creation so the
   (new) body can be analyzed from scratch.  */

void
cgraph_node::reset (void)
{
  /* If process is set, then we have already begun whole-unit analysis.
     This is *not* testing for whether we've already emitted the function.
     That case can be sort-of legitimately seen with real function redefinition
     errors.  I would argue that the front end should never present us with
     such a case, but don't enforce that for now.  */
  gcc_assert (!process);

  /* Reset our data structures so we can analyze the function again.  */
  memset (&local, 0, sizeof (local));
  memset (&global, 0, sizeof (global));
  memset (&rtl, 0, sizeof (rtl));
  analyzed = false;
  definition = false;
  alias = false;
  weakref = false;
  cpp_implicit_alias = false;

  /* Drop the outgoing call edges and ipa references; they describe the
     old body and will be rebuilt during re-analysis.  */
  remove_callees ();
  remove_all_references ();
}
408 
409 /* Return true when there are references to the node.  */
410 
411 bool
412 symtab_node::referred_to_p (void)
413 {
414   ipa_ref *ref = NULL;
415 
416   /* See if there are any references at all.  */
417   if (iterate_referring (0, ref))
418     return true;
419   /* For functions check also calls.  */
420   cgraph_node *cn = dyn_cast <cgraph_node *> (this);
421   if (cn && cn->callers)
422     return true;
423   return false;
424 }
425 
/* DECL has been parsed.  Take it, queue it, compile it at the whim of the
   logic in effect.  If NO_COLLECT is true, then our caller cannot stand to have
   the garbage collector run at the moment.  We would need to either create
   a new GC context, or just not compile right now.  */

void
cgraph_node::finalize_function (tree decl, bool no_collect)
{
  cgraph_node *node = cgraph_node::get_create (decl);

  if (node->definition)
    {
      /* Nested functions should only be defined once.  */
      gcc_assert (!DECL_CONTEXT (decl)
		  || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
      /* A repeated definition: this is the GCC extern inline
	 redefinition extension; discard the old body.  */
      node->reset ();
      node->local.redefined_extern_inline = true;
    }

  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  /* The front end may hand us an already lowered body (one with a CFG).  */
  node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;

  /* With -fkeep-inline-functions we are keeping all inline functions except
     for extern inline ones.  */
  if (flag_keep_inline_functions
      && DECL_DECLARED_INLINE_P (decl)
      && !DECL_EXTERNAL (decl)
      && !DECL_DISREGARD_INLINE_LIMITS (decl))
    node->force_output = 1;

  /* When not optimizing, also output the static functions. (see
     PR24561), but don't do so for always_inline functions, functions
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
  if ((!opt_for_fn (decl, optimize)
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
       && !DECL_DECLARED_INLINE_P (decl)
       && !(DECL_CONTEXT (decl)
	    && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
      && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
    node->force_output = 1;

  /* If we've not yet emitted decl, tell the debug info about it.  */
  if (!TREE_ASM_WRITTEN (decl))
    (*debug_hooks->deferred_inline_function) (decl);

  /* Possibly warn about unused parameters.  */
  if (warn_unused_parameter)
    do_warn_unused_parameter (decl);

  if (!no_collect)
    ggc_collect ();

  /* While the symbol table is still being constructed, queue the node
     for analysis if it is already known to be needed or referenced.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
}
488 
/* Add the function FNDECL to the call graph.
   Unlike finalize_function, this function is intended to be used
   by middle end and allows insertion of new function at arbitrary point
   of compilation.  The function can be either in high, low or SSA form
   GIMPLE.  LOWERED is true when the body has already been lowered to
   low GIMPLE.

   The function is assumed to be reachable and have address taken (so no
   API breaking optimizations are performed on it).

   Main work done by this function is to enqueue the function for later
   processing to avoid need the passes to be re-entrant.  */

void
cgraph_node::add_new_function (tree fndecl, bool lowered)
{
  gcc::pass_manager *passes = g->get_passes ();
  cgraph_node *node;
  /* The amount of catch-up work depends on how far compilation has
     already progressed.  */
  switch (symtab->state)
    {
      case PARSING:
	cgraph_node::finalize_function (fndecl, false);
	break;
      case CONSTRUCTION:
	/* Just enqueue function to be processed at nearest occurrence.  */
	node = cgraph_node::get_create (fndecl);
	if (lowered)
	  node->lowered = true;
	cgraph_new_nodes.safe_push (node);
        break;

      case IPA:
      case IPA_SSA:
      case IPA_SSA_AFTER_INLINING:
      case EXPANSION:
	/* Bring the function into finalized state and enqueue for later
	   analyzing and compilation.  */
	node = cgraph_node::get_create (fndecl);
	node->local.local = false;
	node->definition = true;
	/* Assumed reachable with address taken; never remove it.  */
	node->force_output = true;
	if (!lowered && symtab->state == EXPANSION)
	  {
	    /* During expansion lowering will not happen later; do it
	       here, together with the early local passes.  */
	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	    gimple_register_cfg_hooks ();
	    bitmap_obstack_initialize (NULL);
	    execute_pass_list (cfun, passes->all_lowering_passes);
	    passes->execute_early_local_passes ();
	    bitmap_obstack_release (NULL);
	    pop_cfun ();

	    lowered = true;
	  }
	if (lowered)
	  node->lowered = true;
	cgraph_new_nodes.safe_push (node);
        break;

      case FINISHED:
	/* At the very end of compilation we have to do all the work up
	   to expansion.  */
	node = cgraph_node::create (fndecl);
	if (lowered)
	  node->lowered = true;
	node->definition = true;
	node->analyze ();
	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
	gimple_register_cfg_hooks ();
	bitmap_obstack_initialize (NULL);
	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
	  g->get_passes ()->execute_early_local_passes ();
	bitmap_obstack_release (NULL);
	pop_cfun ();
	node->expand ();
	break;

      default:
	gcc_unreachable ();
    }

  /* Set a personality if required and we already passed EH lowering.  */
  if (lowered
      && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
	  == eh_personality_lang))
    DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
}
574 
575 /* Analyze the function scheduled to be output.  */
576 void
577 cgraph_node::analyze (void)
578 {
579   tree decl = this->decl;
580   location_t saved_loc = input_location;
581   input_location = DECL_SOURCE_LOCATION (decl);
582 
583   if (thunk.thunk_p)
584     {
585       cgraph_node *t = cgraph_node::get (thunk.alias);
586 
587       create_edge (t, NULL, 0, CGRAPH_FREQ_BASE);
588       callees->can_throw_external = !TREE_NOTHROW (t->decl);
589       /* Target code in expand_thunk may need the thunk's target
590 	 to be analyzed, so recurse here.  */
591       if (!t->analyzed)
592 	t->analyze ();
593       if (t->alias)
594 	{
595 	  t = t->get_alias_target ();
596 	  if (!t->analyzed)
597 	    t->analyze ();
598 	}
599       if (!expand_thunk (false, false))
600 	{
601 	  thunk.alias = NULL;
602 	  return;
603 	}
604       thunk.alias = NULL;
605     }
606   if (alias)
607     resolve_alias (cgraph_node::get (alias_target));
608   else if (dispatcher_function)
609     {
610       /* Generate the dispatcher body of multi-versioned functions.  */
611       cgraph_function_version_info *dispatcher_version_info
612 	= function_version ();
613       if (dispatcher_version_info != NULL
614           && (dispatcher_version_info->dispatcher_resolver
615 	      == NULL_TREE))
616 	{
617 	  tree resolver = NULL_TREE;
618 	  gcc_assert (targetm.generate_version_dispatcher_body);
619 	  resolver = targetm.generate_version_dispatcher_body (this);
620 	  gcc_assert (resolver != NULL_TREE);
621 	}
622     }
623   else
624     {
625       push_cfun (DECL_STRUCT_FUNCTION (decl));
626 
627       assign_assembler_name_if_neeeded (decl);
628 
629       /* Make sure to gimplify bodies only once.  During analyzing a
630 	 function we lower it, which will require gimplified nested
631 	 functions, so we can end up here with an already gimplified
632 	 body.  */
633       if (!gimple_has_body_p (decl))
634 	gimplify_function_tree (decl);
635       dump_function (TDI_generic, decl);
636 
637       /* Lower the function.  */
638       if (!lowered)
639 	{
640 	  if (nested)
641 	    lower_nested_functions (decl);
642 	  gcc_assert (!nested);
643 
644 	  gimple_register_cfg_hooks ();
645 	  bitmap_obstack_initialize (NULL);
646 	  execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
647 	  free_dominance_info (CDI_POST_DOMINATORS);
648 	  free_dominance_info (CDI_DOMINATORS);
649 	  compact_blocks ();
650 	  bitmap_obstack_release (NULL);
651 	  lowered = true;
652 	}
653 
654       pop_cfun ();
655     }
656   analyzed = true;
657 
658   input_location = saved_loc;
659 }
660 
661 /* C++ frontend produce same body aliases all over the place, even before PCH
662    gets streamed out. It relies on us linking the aliases with their function
663    in order to do the fixups, but ipa-ref is not PCH safe.  Consequentely we
664    first produce aliases without links, but once C++ FE is sure he won't sream
665    PCH we build the links via this function.  */
666 
667 void
668 symbol_table::process_same_body_aliases (void)
669 {
670   symtab_node *node;
671   FOR_EACH_SYMBOL (node)
672     if (node->cpp_implicit_alias && !node->analyzed)
673       node->resolve_alias
674 	(TREE_CODE (node->alias_target) == VAR_DECL
675 	 ? (symtab_node *)varpool_node::get_create (node->alias_target)
676 	 : (symtab_node *)cgraph_node::get_create (node->alias_target));
677   cpp_implicit_aliases_done = true;
678 }
679 
680 /* Process attributes common for vars and functions.  */
681 
682 static void
683 process_common_attributes (symtab_node *node, tree decl)
684 {
685   tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
686 
687   if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
688     {
689       warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
690 		  "%<weakref%> attribute should be accompanied with"
691 		  " an %<alias%> attribute");
692       DECL_WEAK (decl) = 0;
693       DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
694 						 DECL_ATTRIBUTES (decl));
695     }
696 
697   if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
698     node->no_reorder = 1;
699 }
700 
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (cgraph_node *first,
                                          varpool_node *first_var)
{
  cgraph_node *node;
  varpool_node *vnode;

  /* FIRST and FIRST_VAR delimit the nodes already handled by a previous
     invocation; only the nodes added since then are walked.  */
  for (node = symtab->first_function (); node != first;
       node = symtab->next_function (node))
    {
      tree decl = node->decl;
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* A weakref on an already-defined (non-alias) function is
	 meaningless; warn and drop the attribute.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (node, decl);
    }
  for (vnode = symtab->first_variable (); vnode != first_var;
       vnode = symtab->next_variable (vnode))
    {
      tree decl = vnode->decl;
      /* An external variable carrying an initializer is in fact a
	 definition; finalize it now.  */
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* Likewise, a weakref on an initialized variable is dropped.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						      DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (vnode, decl);
    }
}
795 
/* Mark DECL as finalized.  By finalizing the declaration, frontend instructs
   the middle end to output the variable to asm file, if needed or externally
   visible.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));

  /* Already finalized; nothing to do.  */
  if (node->definition)
    return;
  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reorder.  */
      || node->no_reorder
      || ((!flag_toplevel_reorder
          && !DECL_COMDAT (node->decl)
	   && !DECL_ARTIFICIAL (node->decl))))
    node->force_output = true;

  /* While the symbol table is still being constructed, queue the node
     for analysis if it is already known to be needed or referenced.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  /* Past the analysis stage nothing will analyze it for us.  */
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (symtab->state == FINISHED
      || (!flag_toplevel_reorder
	&& symtab->state == EXPANSION))
    node->assemble_decl ();

  /* Register the initializer with Pointer Bounds Checker (chkp).  */
  if (DECL_INITIAL (decl))
    chkp_register_var_initializer (decl);
}
837 
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  /* hash_set::add returns true when CACHE_TOKEN was already present, so
     the body runs only the first time this target list is seen.  */
  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	  (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		  (TREE_TYPE (targets[i]->decl))
		   == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		   (method_class_type
		     (TREE_TYPE (targets[i]->decl))))
	  enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    /* No possible target at all: the call site is unreachable;
	       redirect it to __builtin_unreachable.  */
	    target = cgraph_node::create
			(builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
          if (dump_enabled_p ())
            {
	      location_t locus = gimple_location_safe (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  edge->make_direct (target);
	  edge->redirect_call_stmt_to_callee ();

	  /* Call to __builtin_unreachable shouldn't be instrumented.  */
	  if (!targets.length ())
	    gimple_call_set_with_bounds (edge->call_stmt, false);

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
925 
926 
/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred by them  */
/* Resume points for analyze_functions: the first function/variable that was
   already analyzed by a previous invocation.  Symbols after these in the
   symtab lists were handled earlier and are skipped on re-entry (used for
   intermodule optimization, where analysis runs more than once).  */
static cgraph_node *first_analyzed;
static varpool_node *first_analyzed_var;
931 
static void
analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  cgraph_node *first_handled = first_analyzed;
  varpool_node *first_handled_var = first_analyzed_var;
  /* Cache tokens of polymorphic call sites already expanded by
     walk_polymorphic_call_targets; shared across the whole analysis so each
     equivalence class of call targets is walked only once.  */
  hash_set<void *> reachable_call_targets;

  symtab_node *node;
  symtab_node *next;
  int i;
  ipa_ref *ref;
  bool changed = true;
  location_t saved_loc = input_location;

  bitmap_obstack_initialize (NULL);
  symtab->state = CONSTRUCTION;
  input_location = UNKNOWN_LOCATION;

  /* Ugly, but the fixup can not happen at a time same body alias is created;
     C++ FE is confused about the COMDAT groups being right.  */
  if (symtab->cpp_implicit_aliases_done)
    FOR_EACH_SYMBOL (node)
      if (node->cpp_implicit_alias)
	  node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
  build_type_inheritance_graph ();

  /* Analysis adds static variables that in turn add references to new
     functions.  So we need to iterate the process until it stabilizes.  */
  while (changed)
    {
      changed = false;
      process_function_and_variable_attributes (first_analyzed,
						first_analyzed_var);

      /* First identify the trivially needed symbols.  */
      for (node = symtab->first_symbol ();
	   node != first_analyzed
	   && node != first_analyzed_var; node = node->next)
	{
	  /* Convert COMDAT group designators to IDENTIFIER_NODEs.  */
	  node->get_comdat_group_id ();
	  if (node->needed_p ())
	    {
	      enqueue_node (node);
	      if (!changed && symtab->dump_file)
		fprintf (symtab->dump_file, "Trivially needed symbols:");
	      changed = true;
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, " %s", node->asm_name ());
	      /* NOTE(review): CHANGED was just set to true above, so this
		 condition can never hold and the fprintf is dead; the
		 closing newline for this dump line is actually emitted
		 after the loop below.  */
	      if (!changed && symtab->dump_file)
		fprintf (symtab->dump_file, "\n");
	    }
	  /* The list may have grown/shifted while enqueueing; stop once the
	     previously-analyzed prefix is reached.  */
	  if (node == first_analyzed
	      || node == first_analyzed_var)
	    break;
	}
      symtab->process_new_functions ();
      first_analyzed_var = symtab->first_variable ();
      first_analyzed = symtab->first_function ();

      if (changed && symtab->dump_file)
	fprintf (symtab->dump_file, "\n");

      /* Lower representation, build callgraph edges and references for all trivially
         needed symbols and all symbols referred by them.  */
      while (queued_nodes != &symtab_terminator)
	{
	  changed = true;
	  /* Pop the head of the queue; the AUX field links queued nodes.  */
	  node = queued_nodes;
	  queued_nodes = (symtab_node *)queued_nodes->aux;
	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
	  if (cnode && cnode->definition)
	    {
	      cgraph_edge *edge;
	      tree decl = cnode->decl;

	      /* ??? It is possible to create extern inline function
	      and later using weak alias attribute to kill its body.
	      See gcc.c-torture/compile/20011119-1.c  */
	      if (!DECL_STRUCT_FUNCTION (decl)
		  && !cnode->alias
		  && !cnode->thunk.thunk_p
		  && !cnode->dispatcher_function)
		{
		  cnode->reset ();
		  cnode->local.redefined_extern_inline = true;
		  continue;
		}

	      if (!cnode->analyzed)
		cnode->analyze ();

	      /* Enqueue callees that must be analyzed along with this
		 function.  */
	      for (edge = cnode->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition
		    && (!DECL_EXTERNAL (edge->callee->decl)
			/* When not optimizing, do not try to analyze extern
			   inline functions.  Doing so is pointless.  */
			|| opt_for_fn (edge->callee->decl, optimize)
			/* Weakrefs needs to be preserved.  */
			|| edge->callee->alias
			/* always_inline functions are inlined even at -O0.  */
		        || lookup_attribute
				 ("always_inline",
			          DECL_ATTRIBUTES (edge->callee->decl))
			/* Multiversioned functions needs the dispatcher to
			   be produced locally even for extern functions.  */
			|| edge->callee->function_version ()))
		   enqueue_node (edge->callee);
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  cgraph_edge *next;

		  /* Walk with a lookahead pointer: devirtualization may
		     turn the current indirect edge into a direct one.  */
		  for (edge = cnode->indirect_calls; edge; edge = next)
		    {
		      next = edge->next_callee;
		      if (edge->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       edge);
		    }
		}

	      /* If decl is a clone of an abstract function,
	      mark that abstract function so that we don't release its body.
	      The DECL_INITIAL() of that abstract function declaration
	      will be later needed to output debug info.  */
	      if (DECL_ABSTRACT_ORIGIN (decl))
		{
		  cgraph_node *origin_node
		    = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
		  origin_node->used_as_abstract_origin = true;
		}
	    }
	  else
	    {
	      varpool_node *vnode = dyn_cast <varpool_node *> (node);
	      if (vnode && vnode->definition && !vnode->analyzed)
		vnode->analyze ();
	    }

	  /* Pull in the rest of the symbol's comdat group; a comdat group
	     must be kept or dropped as a unit.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ())
		  enqueue_node (next);
	    }
	  /* Enqueue everything this symbol refers to that needs a body.  */
	  for (i = 0; node->iterate_reference (i, ref); i++)
	    if (ref->referred->definition
		&& (!DECL_EXTERNAL (ref->referred->decl)
		    || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
			 && optimize)
			|| (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
			    && opt_for_fn (ref->referred->decl, optimize))
		    || node->alias
		    || ref->referred->alias)))
	      enqueue_node (ref->referred);
	  symtab->process_new_functions ();
	}
    }
  update_type_inheritance_graph ();

  /* Collect entry points to the unit.  */
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\n\nInitial ");
      symtab_node::dump_table (symtab->dump_file);
    }

  if (symtab->dump_file)
    fprintf (symtab->dump_file, "\nRemoving unused symbols:");

  /* Symbols that never made it onto the work queue (AUX unset) and are not
     referred to by anything are unreachable; remove them now.  */
  for (node = symtab->first_symbol ();
       node != first_handled
       && node != first_handled_var; node = next)
    {
      next = node->next;
      if (!node->aux && !node->referred_to_p ())
	{
	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, " %s", node->name ());
	  node->remove ();
	  continue;
	}
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  tree decl = node->decl;

	  /* Release bodies killed during analysis (e.g. redefined extern
	     inlines).  */
	  if (cnode->definition && !gimple_has_body_p (decl)
	      && !cnode->alias
	      && !cnode->thunk.thunk_p)
	    cnode->reset ();

	  gcc_assert (!cnode->definition || cnode->thunk.thunk_p
		      || cnode->alias
		      || gimple_has_body_p (decl));
	  gcc_assert (cnode->analyzed == cnode->definition);
	}
      node->aux = NULL;
    }
  /* Clear the queue-link AUX field on the already-handled tail, too.  */
  for (;node; node = node->next)
    node->aux = NULL;
  first_analyzed = symtab->first_function ();
  first_analyzed_var = symtab->first_variable ();
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\n\nReclaimed ");
      symtab_node::dump_table (symtab->dump_file);
    }
  bitmap_obstack_release (NULL);
  ggc_collect ();
  /* Initialize assembler name hash, in particular we want to trigger C++
     mangling and same body alias creation before we free DECL_ARGUMENTS
     used by it.  */
  if (!seen_error ())
    symtab->symtab_initialize_asm_name_hash ();

  input_location = saved_loc;
}
1155 
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  /* Note: every path through the loop body disposes of pair I via
     unordered_remove (which moves the last element into slot I), so I is
     never incremented and the loop terminates when the vector drains.  */
  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  /* Non-weakref alias to an undefined symbol is an error; drop the
	     alias flag so later passes do not expect an alias target.  */
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      /* Function-to-function alias: reset any stale definition of the
	 alias decl and record the alias in the callgraph.  */
      if (TREE_CODE (p->decl) == FUNCTION_DECL
          && target_node && is_a <cgraph_node *> (target_node))
	{
	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      /* Variable-to-variable alias.  */
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  /* Mixed function/variable aliases are not supported.  */
	  error ("%q+D alias in between function and variable is not supported",
		 p->decl);
	  warning (0, "%q+D aliased declaration",
		   target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
1234 
1235 
1236 /* Figure out what functions we want to assemble.  */
1237 
static void
mark_functions_to_output (void)
{
  cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* No node may be marked for output before this pass runs.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* PROCESS may already be set if the node was pulled in via the comdat
	 group of an earlier function; nothing more to do for it.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* A comdat group is emitted as a unit: mark the other members for
	     output too (thunks and aliases are emitted separately).  */
	  if (node->same_comdat_group)
	    {
	      cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Defer the sanity check: the group leader may not have been
	     visited yet, so re-verify all unmarked members at the end.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));

	}

    }
#ifdef ENABLE_CHECKING
  if (check_same_comdat_groups)
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
1336 
/* DECL is a FUNCTION_DECL.  Initialize datastructures so DECL is a function
   in lowered gimple form.  IN_SSA is true if the gimple is in SSA.  COUNT is
   the execution count assigned to the entry block, the exit block, the body
   block and the edges connecting them.

   Set current_function_decl and cfun to newly constructed empty function
   body.  Return the basic block in the function body.  */
1342 
basic_block
init_lowered_empty_function (tree decl, bool in_ssa, gcov_type count)
{
  basic_block bb;
  edge e;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();

  if (in_ssa)
    {
      init_tree_ssa (cfun);
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
      cfun->curr_properties |= PROP_ssa;
    }

  DECL_INITIAL (decl) = make_node (BLOCK);

  DECL_SAVED_TREE (decl) = error_mark_node;
  /* The body is already lowered gimple with CFG and (optionally) SSA;
     record the properties so later passes do not try to lower it again.  */
  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
			    | PROP_cfg | PROP_loops);

  set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
  init_loops_structure (cfun, loops_for_fn (cfun), 1);
  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;

  /* Create BB for body of the function and connect it properly.  */
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
  bb->count = count;
  bb->frequency = BB_FREQ_MAX;
  /* ENTRY -> bb -> EXIT, each edge taken unconditionally.  */
  e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
  e->count = count;
  e->probability = REG_BR_PROB_BASE;
  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  e->count = count;
  e->probability = REG_BR_PROB_BASE;
  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  return bb;
}
1390 
1391 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1392    offset indicated by VIRTUAL_OFFSET, if that is
1393    non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1394    zero for a result adjusting thunk.  */
1395 
static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gassign *stmt;
  tree ret;

  /* For a this-adjusting thunk, apply the fixed offset BEFORE the virtual
     lookup (the vcall offset is found relative to the adjusted this).  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
		(ptr, fold_build_pointer_plus_hwi_loc (input_location,
						       ptr,
						       fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily build the global pointer-to-vtable-entry type used by all
	 thunks.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				     "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				     "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  /* For a result-adjusting thunk, the fixed offset is applied AFTER the
     virtual lookup.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
1494 
1495 /* Expand thunk NODE to gimple if possible.
1496    When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
1497    no assembler is produced.
1498    When OUTPUT_ASM_THUNK is true, also produce assembler for
1499    thunks that are not lowered.  */
1500 
1501 bool
1502 cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
1503 {
1504   bool this_adjusting = thunk.this_adjusting;
1505   HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
1506   HOST_WIDE_INT virtual_value = thunk.virtual_value;
1507   tree virtual_offset = NULL;
1508   tree alias = callees->callee->decl;
1509   tree thunk_fndecl = decl;
1510   tree a;
1511 
1512   /* Instrumentation thunk is the same function with
1513      a different signature.  Never need to expand it.  */
1514   if (thunk.add_pointer_bounds_args)
1515     return false;
1516 
1517   if (!force_gimple_thunk && this_adjusting
1518       && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1519 					      virtual_value, alias))
1520     {
1521       const char *fnname;
1522       tree fn_block;
1523       tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1524 
1525       if (!output_asm_thunks)
1526 	{
1527 	  analyzed = true;
1528 	  return false;
1529 	}
1530 
1531       if (in_lto_p)
1532 	get_untransformed_body ();
1533       a = DECL_ARGUMENTS (thunk_fndecl);
1534 
1535       current_function_decl = thunk_fndecl;
1536 
1537       /* Ensure thunks are emitted in their correct sections.  */
1538       resolve_unique_section (thunk_fndecl, 0,
1539 			      flag_function_sections);
1540 
1541       DECL_RESULT (thunk_fndecl)
1542 	= build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1543 		      RESULT_DECL, 0, restype);
1544       DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1545       fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1546 
1547       /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1548 	 create one.  */
1549       fn_block = make_node (BLOCK);
1550       BLOCK_VARS (fn_block) = a;
1551       DECL_INITIAL (thunk_fndecl) = fn_block;
1552       init_function_start (thunk_fndecl);
1553       cfun->is_thunk = 1;
1554       insn_locations_init ();
1555       set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
1556       prologue_location = curr_insn_location ();
1557       assemble_start_function (thunk_fndecl, fnname);
1558 
1559       targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1560 				       fixed_offset, virtual_value, alias);
1561 
1562       assemble_end_function (thunk_fndecl, fnname);
1563       insn_locations_finalize ();
1564       init_insn_lengths ();
1565       free_after_compilation (cfun);
1566       set_cfun (NULL);
1567       TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1568       thunk.thunk_p = false;
1569       analyzed = false;
1570     }
1571   else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
1572     {
1573       error ("generic thunk code fails for method %qD which uses %<...%>",
1574 	     thunk_fndecl);
1575       TREE_ASM_WRITTEN (thunk_fndecl) = 1;
1576       analyzed = true;
1577       return false;
1578     }
1579   else
1580     {
1581       tree restype;
1582       basic_block bb, then_bb, else_bb, return_bb;
1583       gimple_stmt_iterator bsi;
1584       int nargs = 0;
1585       tree arg;
1586       int i;
1587       tree resdecl;
1588       tree restmp = NULL;
1589       tree resbnd = NULL;
1590 
1591       gcall *call;
1592       greturn *ret;
1593       bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
1594 
1595       if (in_lto_p)
1596 	get_untransformed_body ();
1597       a = DECL_ARGUMENTS (thunk_fndecl);
1598 
1599       current_function_decl = thunk_fndecl;
1600 
1601       /* Ensure thunks are emitted in their correct sections.  */
1602       resolve_unique_section (thunk_fndecl, 0,
1603 			      flag_function_sections);
1604 
1605       DECL_IGNORED_P (thunk_fndecl) = 1;
1606       bitmap_obstack_initialize (NULL);
1607 
1608       if (thunk.virtual_offset_p)
1609         virtual_offset = size_int (virtual_value);
1610 
1611       /* Build the return declaration for the function.  */
1612       restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1613       if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1614 	{
1615 	  resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1616 	  DECL_ARTIFICIAL (resdecl) = 1;
1617 	  DECL_IGNORED_P (resdecl) = 1;
1618 	  DECL_RESULT (thunk_fndecl) = resdecl;
1619           DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
1620 	}
1621       else
1622 	resdecl = DECL_RESULT (thunk_fndecl);
1623 
1624       bb = then_bb = else_bb = return_bb
1625 	= init_lowered_empty_function (thunk_fndecl, true, count);
1626 
1627       bsi = gsi_start_bb (bb);
1628 
1629       /* Build call to the function being thunked.  */
1630       if (!VOID_TYPE_P (restype) && !alias_is_noreturn)
1631 	{
1632 	  if (DECL_BY_REFERENCE (resdecl))
1633 	    {
1634 	      restmp = gimple_fold_indirect_ref (resdecl);
1635 	      if (!restmp)
1636 		restmp = build2 (MEM_REF,
1637 				 TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
1638 				 resdecl,
1639 				 build_int_cst (TREE_TYPE
1640 				   (DECL_RESULT (alias)), 0));
1641 	    }
1642 	  else if (!is_gimple_reg_type (restype))
1643 	    {
1644 	      if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
1645 		{
1646 		  restmp = resdecl;
1647 
1648 		  if (TREE_CODE (restmp) == VAR_DECL)
1649 		    add_local_decl (cfun, restmp);
1650 		  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1651 		}
1652 	      else
1653 		restmp = create_tmp_var (restype, "retval");
1654 	    }
1655 	  else
1656 	    restmp = create_tmp_reg (restype, "retval");
1657 	}
1658 
1659       for (arg = a; arg; arg = DECL_CHAIN (arg))
1660         nargs++;
1661       auto_vec<tree> vargs (nargs);
1662       i = 0;
1663       arg = a;
1664       if (this_adjusting)
1665 	{
1666 	  vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
1667 					  virtual_offset));
1668 	  arg = DECL_CHAIN (a);
1669 	  i = 1;
1670 	}
1671 
1672       if (nargs)
1673 	for (; i < nargs; i++, arg = DECL_CHAIN (arg))
1674 	  {
1675 	    tree tmp = arg;
1676 	    if (VECTOR_TYPE_P (TREE_TYPE (arg))
1677 		|| TREE_CODE (TREE_TYPE (arg)) == COMPLEX_TYPE)
1678 	      DECL_GIMPLE_REG_P (arg) = 1;
1679 
1680 	    if (!is_gimple_val (arg))
1681 	      {
1682 		tmp = create_tmp_reg (TYPE_MAIN_VARIANT
1683 				      (TREE_TYPE (arg)), "arg");
1684 		gimple stmt = gimple_build_assign (tmp, arg);
1685 		gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1686 	      }
1687 	    vargs.quick_push (tmp);
1688 	  }
1689       call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1690       callees->call_stmt = call;
1691       gimple_call_set_from_thunk (call, true);
1692       gimple_call_set_with_bounds (call, instrumentation_clone);
1693 
1694       /* Return slot optimization is always possible and in fact requred to
1695          return values with DECL_BY_REFERENCE.  */
1696       if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
1697 	  && (!is_gimple_reg_type (TREE_TYPE (resdecl))
1698 	      || DECL_BY_REFERENCE (resdecl)))
1699         gimple_call_set_return_slot_opt (call, true);
1700 
1701       if (restmp && !alias_is_noreturn)
1702 	{
1703           gimple_call_set_lhs (call, restmp);
1704 	  gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
1705 						 TREE_TYPE (TREE_TYPE (alias))));
1706 	}
1707       gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1708       if (!alias_is_noreturn)
1709 	{
1710 	  if (instrumentation_clone
1711 	      && !DECL_BY_REFERENCE (resdecl)
1712 	      && restmp
1713 	      && BOUNDED_P (restmp))
1714 	    {
1715 	      resbnd = chkp_insert_retbnd_call (NULL, restmp, &bsi);
1716 	      create_edge (get_create (gimple_call_fndecl (gsi_stmt (bsi))),
1717 			   as_a <gcall *> (gsi_stmt (bsi)),
1718 			   callees->count, callees->frequency);
1719 	    }
1720 
1721 	  if (restmp && !this_adjusting
1722 	      && (fixed_offset || virtual_offset))
1723 	    {
1724 	      tree true_label = NULL_TREE;
1725 
1726 	      if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1727 		{
1728 		  gimple stmt;
1729 		  edge e;
1730 		  /* If the return type is a pointer, we need to
1731 		     protect against NULL.  We know there will be an
1732 		     adjustment, because that's why we're emitting a
1733 		     thunk.  */
1734 		  then_bb = create_basic_block (NULL, (void *) 0, bb);
1735 		  then_bb->count = count - count / 16;
1736 		  then_bb->frequency = BB_FREQ_MAX - BB_FREQ_MAX / 16;
1737 		  return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1738 		  return_bb->count = count;
1739 		  return_bb->frequency = BB_FREQ_MAX;
1740 		  else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1741 		  then_bb->count = count / 16;
1742 		  then_bb->frequency = BB_FREQ_MAX / 16;
1743 		  add_bb_to_loop (then_bb, bb->loop_father);
1744 		  add_bb_to_loop (return_bb, bb->loop_father);
1745 		  add_bb_to_loop (else_bb, bb->loop_father);
1746 		  remove_edge (single_succ_edge (bb));
1747 		  true_label = gimple_block_label (then_bb);
1748 		  stmt = gimple_build_cond (NE_EXPR, restmp,
1749 					    build_zero_cst (TREE_TYPE (restmp)),
1750 					    NULL_TREE, NULL_TREE);
1751 		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1752 		  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1753 		  e->probability = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 16;
1754 		  e->count = count - count / 16;
1755 		  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1756 		  e->probability = REG_BR_PROB_BASE / 16;
1757 		  e->count = count / 16;
1758 		  e = make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
1759 		  e->probability = REG_BR_PROB_BASE;
1760 		  e->count = count;
1761 		  e = make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1762 		  e->probability = REG_BR_PROB_BASE;
1763 		  e->count = count - count / 16;
1764 		  e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1765 		  e->probability = REG_BR_PROB_BASE;
1766 		  e->count = count / 16;
1767 		  bsi = gsi_last_bb (then_bb);
1768 		}
1769 
1770 	      restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1771 				     fixed_offset, virtual_offset);
1772 	      if (true_label)
1773 		{
1774 		  gimple stmt;
1775 		  bsi = gsi_last_bb (else_bb);
1776 		  stmt = gimple_build_assign (restmp,
1777 					      build_zero_cst (TREE_TYPE (restmp)));
1778 		  gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1779 		  bsi = gsi_last_bb (return_bb);
1780 		}
1781 	    }
1782 	  else
1783 	    gimple_call_set_tail (call, true);
1784 
1785 	  /* Build return value.  */
1786 	  if (!DECL_BY_REFERENCE (resdecl))
1787 	    ret = gimple_build_return (restmp);
1788 	  else
1789 	    ret = gimple_build_return (resdecl);
1790 	  gimple_return_set_retbnd (ret, resbnd);
1791 
1792 	  gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1793 	}
1794       else
1795 	{
1796 	  gimple_call_set_tail (call, true);
1797 	  remove_edge (single_succ_edge (bb));
1798 	}
1799 
1800       cfun->gimple_df->in_ssa_p = true;
1801       profile_status_for_fn (cfun)
1802         = count ? PROFILE_READ : PROFILE_GUESSED;
1803       /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks.  */
1804       TREE_ASM_WRITTEN (thunk_fndecl) = false;
1805       delete_unreachable_blocks ();
1806       update_ssa (TODO_update_ssa);
1807 #ifdef ENABLE_CHECKING
1808       verify_flow_info ();
1809 #endif
1810       free_dominance_info (CDI_DOMINATORS);
1811 
1812       /* Since we want to emit the thunk, we explicitly mark its name as
1813 	 referenced.  */
1814       thunk.thunk_p = false;
1815       lowered = true;
1816       bitmap_obstack_release (NULL);
1817     }
1818   current_function_decl = NULL;
1819   set_cfun (NULL);
1820   return true;
1821 }
1822 
1823 /* Assemble thunks and aliases associated to node.  */
1824 
1825 void
1826 cgraph_node::assemble_thunks_and_aliases (void)
1827 {
1828   cgraph_edge *e;
1829   ipa_ref *ref;
1830 
1831   for (e = callers; e;)
1832     if (e->caller->thunk.thunk_p
1833 	&& !e->caller->thunk.add_pointer_bounds_args)
1834       {
1835 	cgraph_node *thunk = e->caller;
1836 
1837 	e = e->next_caller;
1838 	thunk->expand_thunk (true, false);
1839 	thunk->assemble_thunks_and_aliases ();
1840       }
1841     else
1842       e = e->next_caller;
1843 
1844   FOR_EACH_ALIAS (this, ref)
1845     {
1846       cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
1847       bool saved_written = TREE_ASM_WRITTEN (decl);
1848 
1849       /* Force assemble_alias to really output the alias this time instead
1850 	 of buffering it in same alias pairs.  */
1851       TREE_ASM_WRITTEN (decl) = 1;
1852       do_assemble_alias (alias->decl,
1853 			 DECL_ASSEMBLER_NAME (decl));
1854       alias->assemble_thunks_and_aliases ();
1855       TREE_ASM_WRITTEN (decl) = saved_written;
1856     }
1857 }
1858 
1859 /* Expand function specified by node.  */
1860 
void
cgraph_node::expand (void)
{
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!global.inlined_to);

  /* Report progress to the user and drop this node from the queue of
     functions waiting to be output.  */
  announce_function (decl);
  process = 0;
  gcc_assert (lowered);
  /* Fetch the GIMPLE body; it may need to be read back in (e.g. when it
     was streamed out for LTO) -- see get_untransformed_body.  */
  get_untransformed_body ();

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (symtab->global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = decl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply any queued IPA transformations to this body before running
     the intraprocedural pass queue.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Print the exact size when it fits in unsigned int; otherwise
	     only report that the configured threshold was exceeded.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_node::get (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  set_cfun (NULL);
  current_function_decl = NULL;

  /* It would make a lot more sense to output thunks before function body to get more
     forward and fewer backward jumps.  This however would need solving problem
     with comdats.  See PR48668.  Also aliases must come after function itself to
     make one pass assemblers, like one on AIX, happy.  See PR 50689.
     FIXME: Perhaps thunks should be moved before function IFF they are not in comdat
     groups.  */
  assemble_thunks_and_aliases ();
  release_body ();
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  remove_callees ();
  remove_all_references ();
}
1970 
1971 /* Node comparer that is responsible for the order that corresponds
1972    to time when a function was launched for the first time.  */
1973 
1974 static int
1975 node_cmp (const void *pa, const void *pb)
1976 {
1977   const cgraph_node *a = *(const cgraph_node * const *) pa;
1978   const cgraph_node *b = *(const cgraph_node * const *) pb;
1979 
1980   /* Functions with time profile must be before these without profile.  */
1981   if (!a->tp_first_run || !b->tp_first_run)
1982     return a->tp_first_run - b->tp_first_run;
1983 
1984   return a->tp_first_run != b->tp_first_run
1985 	 ? b->tp_first_run - a->tp_first_run
1986 	 : b->order - a->order;
1987 }
1988 
1989 /* Expand all functions that must be output.
1990 
1991    Attempt to topologically sort the nodes so function is output when
1992    all called functions are already assembled to allow data to be
1993    propagated across the callgraph.  Use a stack to get smaller distance
1994    between a function and its callees (later we may choose to use a more
1995    sophisticated algorithm for function reordering; we will likely want
1996    to use subsections to make the output functions appear in top-down
1997    order).  */
1998 
1999 static void
2000 expand_all_functions (void)
2001 {
2002   cgraph_node *node;
2003   cgraph_node **order = XCNEWVEC (cgraph_node *,
2004 					 symtab->cgraph_count);
2005   unsigned int expanded_func_count = 0, profiled_func_count = 0;
2006   int order_pos, new_order_pos = 0;
2007   int i;
2008 
2009   order_pos = ipa_reverse_postorder (order);
2010   gcc_assert (order_pos == symtab->cgraph_count);
2011 
2012   /* Garbage collector may remove inline clones we eliminate during
2013      optimization.  So we must be sure to not reference them.  */
2014   for (i = 0; i < order_pos; i++)
2015     if (order[i]->process)
2016       order[new_order_pos++] = order[i];
2017 
2018   if (flag_profile_reorder_functions)
2019     qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);
2020 
2021   for (i = new_order_pos - 1; i >= 0; i--)
2022     {
2023       node = order[i];
2024 
2025       if (node->process)
2026 	{
2027      expanded_func_count++;
2028      if(node->tp_first_run)
2029        profiled_func_count++;
2030 
2031     if (symtab->dump_file)
2032 	  fprintf (symtab->dump_file,
2033 		   "Time profile order in expand_all_functions:%s:%d\n",
2034 		   node->asm_name (), node->tp_first_run);
2035 	  node->process = 0;
2036 	  node->expand ();
2037 	}
2038     }
2039 
2040     if (dump_file)
2041       fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
2042                main_input_filename, profiled_func_count, expanded_func_count);
2043 
2044   if (symtab->dump_file && flag_profile_reorder_functions)
2045     fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
2046              profiled_func_count, expanded_func_count);
2047 
2048   symtab->process_new_functions ();
2049   free_gimplify_stack ();
2050 
2051   free (order);
2052 }
2053 
2054 /* This is used to sort the node types by the cgraph order number.  */
2055 
enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,	/* Slot not occupied by any symbol.  */
  ORDER_FUNCTION,	/* Slot holds a function (cgraph_node).  */
  ORDER_VAR,		/* Slot holds a variable (varpool_node).  */
  ORDER_ASM		/* Slot holds a toplevel asm statement.  */
};
2063 
struct cgraph_order_sort
{
  /* Discriminates which member of the union U below is valid.  */
  enum cgraph_order_sort_kind kind;
  union
  {
    cgraph_node *f;	/* Valid when kind == ORDER_FUNCTION.  */
    varpool_node *v;	/* Valid when kind == ORDER_VAR.  */
    asm_node *a;	/* Valid when kind == ORDER_ASM.  */
  } u;
};
2074 
2075 /* Output all functions, variables, and asm statements in the order
2076    according to their order fields, which is the order in which they
2077    appeared in the file.  This implements -fno-toplevel-reorder.  In
2078    this mode we may output functions and variables which don't really
2079    need to be output.
2080    When NO_REORDER is true only do this for symbols marked no reorder. */
2081 
2082 static void
2083 output_in_order (bool no_reorder)
2084 {
2085   int max;
2086   cgraph_order_sort *nodes;
2087   int i;
2088   cgraph_node *pf;
2089   varpool_node *pv;
2090   asm_node *pa;
2091   max = symtab->order;
2092   nodes = XCNEWVEC (cgraph_order_sort, max);
2093 
2094   FOR_EACH_DEFINED_FUNCTION (pf)
2095     {
2096       if (pf->process && !pf->thunk.thunk_p && !pf->alias)
2097 	{
2098 	  if (no_reorder && !pf->no_reorder)
2099 	    continue;
2100 	  i = pf->order;
2101 	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2102 	  nodes[i].kind = ORDER_FUNCTION;
2103 	  nodes[i].u.f = pf;
2104 	}
2105     }
2106 
2107   FOR_EACH_DEFINED_VARIABLE (pv)
2108     if (!DECL_EXTERNAL (pv->decl))
2109       {
2110 	if (no_reorder && !pv->no_reorder)
2111 	    continue;
2112 	i = pv->order;
2113 	gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2114 	nodes[i].kind = ORDER_VAR;
2115 	nodes[i].u.v = pv;
2116       }
2117 
2118   for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
2119     {
2120       i = pa->order;
2121       gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
2122       nodes[i].kind = ORDER_ASM;
2123       nodes[i].u.a = pa;
2124     }
2125 
2126   /* In toplevel reorder mode we output all statics; mark them as needed.  */
2127 
2128   for (i = 0; i < max; ++i)
2129     if (nodes[i].kind == ORDER_VAR)
2130       nodes[i].u.v->finalize_named_section_flags ();
2131 
2132   for (i = 0; i < max; ++i)
2133     {
2134       switch (nodes[i].kind)
2135 	{
2136 	case ORDER_FUNCTION:
2137 	  nodes[i].u.f->process = 0;
2138 	  nodes[i].u.f->expand ();
2139 	  break;
2140 
2141 	case ORDER_VAR:
2142 	  nodes[i].u.v->assemble_decl ();
2143 	  break;
2144 
2145 	case ORDER_ASM:
2146 	  assemble_asm (nodes[i].u.a->asm_str);
2147 	  break;
2148 
2149 	case ORDER_UNDEFINED:
2150 	  break;
2151 
2152 	default:
2153 	  gcc_unreachable ();
2154 	}
2155     }
2156 
2157   symtab->clear_asm_symbols ();
2158 
2159   free (nodes);
2160 }
2161 
/* Run the interprocedural (IPA) pass machinery: small IPA passes, summary
   generation, LTO/offload summary streaming, and -- when this compilation
   actually performs the optimization -- the regular IPA passes.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  /* IPA passes work on the whole program, not on one function.  */
  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      /* NOTE(review): this early return skips the bitmap_obstack_release
	 paired with the initialize above; presumably harmless because
	 compilation is being abandoned on error -- confirm.  */
      if (seen_error ())
	return;
    }

  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
     devirtualization and other changes where removal iterate.  */
  symtab->remove_unreachable_nodes (symtab->dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (symtab->state < IPA_SSA)
    symtab->state = IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      symtab->process_new_functions ();

      /* Compute per-node summaries consumed by the regular IPA passes
	 (possibly at link time, when streaming for LTO).  */
      execute_ipa_summary_passes
	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_start ();

  if (!in_lto_p)
    {
      /* Stream summaries once for the offload target sections and once
	 for the host LTO sections; the section prefix and the
	 lto_stream_offload_p flag select the destination.  */
      if (g->have_offload)
	{
	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = true;
	  ipa_write_summaries ();
	  lto_stream_offload_p = false;
	}
      if (flag_lto)
	{
	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = false;
	  ipa_write_summaries ();
	}
    }

  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_end ();

  /* Run the regular IPA passes here unless the work is deferred: in an
     ltrans unit they already ran at WPA time, and in a non-fat -flto
     compile they run at link time instead.  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
2234 
2235 
2236 /* Return string alias is alias of.  */
2237 
2238 static tree
2239 get_alias_symbol (tree decl)
2240 {
2241   tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
2242   return get_identifier (TREE_STRING_POINTER
2243 			  (TREE_VALUE (TREE_VALUE (alias))));
2244 }
2245 
2246 
2247 /* Weakrefs may be associated to external decls and thus not output
2248    at expansion time.  Emit all necessary aliases.  */
2249 
void
symbol_table::output_weakrefs (void)
{
  symtab_node *node;
  cgraph_node *cnode;
  /* Emit every weakref alias not already written at expansion time.
     A function node is also skipped when its instrumented twin's decl
     has already been written out.  */
  FOR_EACH_SYMBOL (node)
    if (node->alias
        && !TREE_ASM_WRITTEN (node->decl)
	&& (!(cnode = dyn_cast <cgraph_node *> (node))
	    || !cnode->instrumented_version
	    || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
	&& node->weakref)
      {
	tree target;

	/* Weakrefs are special by not requiring target definition in current
	   compilation unit.  It is thus bit hard to work out what we want to
	   alias.
	   When alias target is defined, we need to fetch it from symtab reference,
	   otherwise it is pointed to by alias_target.  */
	if (node->alias_target)
	  target = (DECL_P (node->alias_target)
		    ? DECL_ASSEMBLER_NAME (node->alias_target)
		    : node->alias_target);
	else if (node->analyzed)
	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
	else
	  {
	    /* Neither an explicit target nor an analyzed alias: this is not
	       expected to happen.  The assignment below is dead code kept
	       after the gcc_unreachable as documentation of the fallback.  */
	    gcc_unreachable ();
	    target = get_alias_symbol (node->decl);
	  }
        do_assemble_alias (node->decl, target);
      }
}
2284 
2285 /* Perform simple optimizations based on callgraph.  */
2286 
void
symbol_table::compile (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  state = IPA;

  /* Offloading requires LTO infrastructure.  */
  if (!in_lto_p && g->have_offload)
    flag_generate_offload = 1;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_generate_lto || flag_generate_offload)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just streaming LTO.  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  /* From here on the IPA analysis is complete and expansion may start.  */
  global_info_ready = true;
  if (dump_file)
    {
      fprintf (dump_file, "Optimized ");
      symtab_node:: dump_table (dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  /* Give clones real bodies, run the late IPA passes and decide which
     functions need to be written out.  */
  materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	   = (node->alias_target ? node->alias_target
	      : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
      }
#endif

  state = EXPANSION;

  if (!flag_toplevel_reorder)
    output_in_order (false);
  else
    {
      /* Output first asm statements and anything ordered. The process
         flag is cleared for these nodes, so we skip them later.  */
      output_in_order (true);
      expand_all_functions ();
      output_variables ();
    }

  process_new_functions ();
  state = FINISHED;
  output_weakrefs ();

  if (dump_file)
    {
      fprintf (dump_file, "\nFinal ");
      symtab_node::dump_table (dump_file);
    }
#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    node->debug ();
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
2423 
2424 
2425 /* Analyze the whole compilation unit once it is parsed completely.  */
2426 
void
symbol_table::finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  Called a second
     time because analyze_functions above can discover further alias
     pairs (NOTE(review): presumably; confirm against handle_alias_pairs).  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  */
  analyze_functions ();

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
2470 
2471 /* Reset all state within cgraphunit.c so that we can rerun the compiler
2472    within the same process.  For use by toplev::finalize.  */
2473 
2474 void
2475 cgraphunit_c_finalize (void)
2476 {
2477   gcc_assert (cgraph_new_nodes.length () == 0);
2478   cgraph_new_nodes.truncate (0);
2479 
2480   vtable_entry_type = NULL;
2481   queued_nodes = &symtab_terminator;
2482 
2483   first_analyzed = NULL;
2484   first_analyzed_var = NULL;
2485 }
2486 
2487 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
2488    kind of wrapper method.  */
2489 
void
cgraph_node::create_wrapper (cgraph_node *target)
{
  /* Preserve DECL_RESULT so we get right by reference flag.  */
  tree decl_result = DECL_RESULT (decl);

  /* Remove the function's body but keep arguments to be reused
     for thunk.  */
  release_body (true);
  reset ();

  DECL_UNINLINABLE (decl) = false;
  DECL_RESULT (decl) = decl_result;
  /* Clear DECL_INITIAL; a fresh struct function is allocated for the
     new thunk body below.  */
  DECL_INITIAL (decl) = NULL;
  allocate_struct_function (decl, false);
  set_cfun (NULL);

  /* Turn alias into thunk and expand it into GIMPLE representation.  */
  definition = true;

  /* A plain wrapper is a thunk with no this/virtual offset adjustments,
     hence the zeroed cgraph_thunk_info.  */
  memset (&thunk, 0, sizeof (cgraph_thunk_info));
  thunk.thunk_p = true;
  /* Record the call edge to TARGET and whether that call can throw to
     external callers.  */
  create_edge (target, NULL, count, CGRAPH_FREQ_BASE);
  callees->can_throw_external = !TREE_NOTHROW (target->decl);

  tree arguments = DECL_ARGUMENTS (decl);

  /* Clear TREE_ADDRESSABLE on the forwarded arguments (NOTE(review):
     presumably because the generated thunk body never takes their
     address -- confirm against expand_thunk).  */
  while (arguments)
    {
      TREE_ADDRESSABLE (arguments) = false;
      arguments = TREE_CHAIN (arguments);
    }

  /* Build the thunk body in GIMPLE form; compare the (true, false)
     call in assemble_thunks_and_aliases, which emits assembly instead.  */
  expand_thunk (false, true);

  /* Inline summary set-up.  */
  analyze ();
  inline_analyze_function (this);
}
2529 
2530 #include "gt-cgraphunit.h"
2531