xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cgraph.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* Callgraph handling code.
2    Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3    Free Software Foundation, Inc.
4    Contributed by Jan Hubicka
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 /*  This file contains basic routines manipulating the call graph
23 
24 The callgraph:
25 
26     The call-graph is a data structure designed for inter-procedural
27     optimization.  It is also used in non-unit-at-a-time compilation to
28     allow easier code sharing.
29 
30     The call-graph consists of nodes and edges represented via linked lists.
31     Each function (external or not) corresponds to a unique node.
32 
33     The mapping from declarations to call-graph nodes is done using a hash
34     table based on DECL_UID.  Call-graph nodes are created lazily by the
35     cgraph_node function when it is called for an unknown declaration.
36 
37     The callgraph at the moment does not represent indirect calls or calls
38     from other compilation units.  The NEEDED flag is set for each node that
39     may be accessed in such an invisible way; such a node is considered an
40     entry point to the callgraph.
41 
42     Interprocedural information:
43 
44       The callgraph is the place to store data needed for interprocedural
45       optimization.  All data structures are divided into three components:
46       local_info, which is produced while analyzing the function; global_info,
47       which is the result of walking the whole callgraph at the end of
48       compilation; and rtl_info, which is used by the RTL backend to propagate
49       data from already compiled functions to their callers.
50 
51     Inlining plans:
52 
53       The function inlining information is decided in advance and maintained
54       in the callgraph as a so-called inline plan.
55       For each inlined call, the callee's node is cloned to represent the
56       new function copy produced by the inliner.
57       Each inlined call gets a unique corresponding clone node of the callee,
58       and the data structure is updated while inlining is performed, so that
59       the clones are eliminated and their callee edges redirected to the
60       caller.
61 
62       Each edge has an "inline_failed" field.  When the field is set to NULL,
63       the call will be inlined.  When it is non-NULL, it contains the reason
64       why inlining wasn't performed.  */
65 
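/* A minimal sketch of how a pass might walk the graph built here, assuming
   only the node and edge lists maintained below (cgraph_nodes and the
   per-node callee list) and cgraph_node_name for printing:

     struct cgraph_node *node;
     struct cgraph_edge *e;

     for (node = cgraph_nodes; node; node = node->next)
       for (e = node->callees; e; e = e->next_callee)
	 fprintf (stderr, "%s calls %s\n",
		  cgraph_node_name (e->caller),
		  cgraph_node_name (e->callee));  */
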
66 #include "config.h"
67 #include "system.h"
68 #include "coretypes.h"
69 #include "tm.h"
70 #include "tree.h"
71 #include "tree-inline.h"
72 #include "langhooks.h"
73 #include "hashtab.h"
74 #include "toplev.h"
75 #include "flags.h"
76 #include "ggc.h"
77 #include "debug.h"
78 #include "target.h"
79 #include "basic-block.h"
80 #include "cgraph.h"
81 #include "output.h"
82 #include "intl.h"
83 #include "gimple.h"
84 #include "tree-dump.h"
85 #include "tree-flow.h"
86 #include "value-prof.h"
87 #include "except.h"
88 #include "diagnostic.h"
89 #include "rtl.h"
90 
91 static void cgraph_node_remove_callers (struct cgraph_node *node);
92 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
93 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
94 
95 /* Hash table used to convert declarations into nodes.  */
96 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
97 /* Hash table used to convert assembler names into nodes.  */
98 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
99 
100 /* The linked list of cgraph nodes.  */
101 struct cgraph_node *cgraph_nodes;
102 
103 /* Queue of cgraph nodes scheduled to be lowered.  */
104 struct cgraph_node *cgraph_nodes_queue;
105 
106 /* Queue of cgraph nodes scheduled to be added into cgraph.  This is a
107    secondary queue used during optimization to accommodate passes that
108    may generate new functions that need to be optimized and expanded.  */
109 struct cgraph_node *cgraph_new_nodes;
110 
111 /* Number of nodes in existence.  */
112 int cgraph_n_nodes;
113 
114 /* Maximal uid used in cgraph nodes.  */
115 int cgraph_max_uid;
116 
117 /* Maximal uid used in cgraph edges.  */
118 int cgraph_edge_max_uid;
119 
120 /* Maximal pid used for profiling */
121 int cgraph_max_pid;
122 
123 /* Set when whole unit has been analyzed so we can access global info.  */
124 bool cgraph_global_info_ready = false;
125 
126 /* What state callgraph is in right now.  */
127 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
128 
129 /* Set when the cgraph is fully built and the basic flags are computed.  */
130 bool cgraph_function_flags_ready = false;
131 
132 /* Linked list of cgraph asm nodes.  */
133 struct cgraph_asm_node *cgraph_asm_nodes;
134 
135 /* Last node in cgraph_asm_nodes.  */
136 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
137 
138 /* The order index of the next cgraph node to be created.  This is
139    used so that we can sort the cgraph nodes in order by when we saw
140    them, to support -fno-toplevel-reorder.  */
141 int cgraph_order;
142 
143 /* List of hooks triggered on cgraph_edge events.  */
144 struct cgraph_edge_hook_list {
145   cgraph_edge_hook hook;
146   void *data;
147   struct cgraph_edge_hook_list *next;
148 };
149 
150 /* List of hooks triggered on cgraph_node events.  */
151 struct cgraph_node_hook_list {
152   cgraph_node_hook hook;
153   void *data;
154   struct cgraph_node_hook_list *next;
155 };
156 
157 /* List of hooks triggered on events involving two cgraph_edges.  */
158 struct cgraph_2edge_hook_list {
159   cgraph_2edge_hook hook;
160   void *data;
161   struct cgraph_2edge_hook_list *next;
162 };
163 
164 /* List of hooks triggered on events involving two cgraph_nodes.  */
165 struct cgraph_2node_hook_list {
166   cgraph_2node_hook hook;
167   void *data;
168   struct cgraph_2node_hook_list *next;
169 };
170 
171 /* List of hooks triggered when an edge is removed.  */
172 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
173 /* List of hooks triggered when a node is removed.  */
174 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
175 /* List of hooks triggered when an edge is duplicated.  */
176 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
177 /* List of hooks triggered when a node is duplicated.  */
178 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
179 /* List of hooks triggered when a function is inserted.  */
180 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
181 
182 /* Head of a linked list of unused (freed) call graph nodes.
183    Do not GTY((delete)) this list so UIDs get reliably recycled.  */
184 static GTY(()) struct cgraph_node *free_nodes;
185 /* Head of a linked list of unused (freed) call graph edges.
186    Do not GTY((delete)) this list so UIDs get reliably recycled.  */
187 static GTY(()) struct cgraph_edge *free_edges;
188 
189 /* Macros to access the next item in the list of free cgraph nodes and
190    edges. */
191 #define NEXT_FREE_NODE(NODE) (NODE)->next
192 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
193 
194 /* Register HOOK to be called with DATA on each removed edge.  */
195 struct cgraph_edge_hook_list *
196 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
197 {
198   struct cgraph_edge_hook_list *entry;
199   struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
200 
201   entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
202   entry->hook = hook;
203   entry->data = data;
204   entry->next = NULL;
205   while (*ptr)
206     ptr = &(*ptr)->next;
207   *ptr = entry;
208   return entry;
209 }
210 
211 /* Remove ENTRY from the list of hooks called on removing edges.  */
212 void
213 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
214 {
215   struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
216 
217   while (*ptr != entry)
218     ptr = &(*ptr)->next;
219   *ptr = entry->next;
220   free (entry);
221 }
222 
223 /* Call all edge removal hooks.  */
224 static void
225 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
226 {
227   struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
228   while (entry)
229   {
230     entry->hook (e, entry->data);
231     entry = entry->next;
232   }
233 }
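
/* A minimal sketch of registering and unregistering an edge-removal hook;
   my_edge_removal_hook is a hypothetical pass-local function and stderr
   merely stands in for real pass data:

     static void
     my_edge_removal_hook (struct cgraph_edge *e, void *data)
     {
       fprintf ((FILE *) data, "removing edge %s -> %s\n",
		cgraph_node_name (e->caller),
		cgraph_node_name (e->callee));
     }

     struct cgraph_edge_hook_list *entry
       = cgraph_add_edge_removal_hook (my_edge_removal_hook, stderr);
     ... the pass runs here ...
     cgraph_remove_edge_removal_hook (entry);  */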
234 
235 /* Register HOOK to be called with DATA on each removed node.  */
236 struct cgraph_node_hook_list *
237 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
238 {
239   struct cgraph_node_hook_list *entry;
240   struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
241 
242   entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
243   entry->hook = hook;
244   entry->data = data;
245   entry->next = NULL;
246   while (*ptr)
247     ptr = &(*ptr)->next;
248   *ptr = entry;
249   return entry;
250 }
251 
252 /* Remove ENTRY from the list of hooks called on removing nodes.  */
253 void
254 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
255 {
256   struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
257 
258   while (*ptr != entry)
259     ptr = &(*ptr)->next;
260   *ptr = entry->next;
261   free (entry);
262 }
263 
264 /* Call all node removal hooks.  */
265 static void
266 cgraph_call_node_removal_hooks (struct cgraph_node *node)
267 {
268   struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
269   while (entry)
270   {
271     entry->hook (node, entry->data);
272     entry = entry->next;
273   }
274 }
275 
276 /* Register HOOK to be called with DATA on each inserted node.  */
277 struct cgraph_node_hook_list *
278 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
279 {
280   struct cgraph_node_hook_list *entry;
281   struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
282 
283   entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
284   entry->hook = hook;
285   entry->data = data;
286   entry->next = NULL;
287   while (*ptr)
288     ptr = &(*ptr)->next;
289   *ptr = entry;
290   return entry;
291 }
292 
293 /* Remove ENTRY from the list of hooks called on inserted nodes.  */
294 void
295 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
296 {
297   struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
298 
299   while (*ptr != entry)
300     ptr = &(*ptr)->next;
301   *ptr = entry->next;
302   free (entry);
303 }
304 
305 /* Call all node insertion hooks.  */
306 void
307 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
308 {
309   struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
310   while (entry)
311   {
312     entry->hook (node, entry->data);
313     entry = entry->next;
314   }
315 }
316 
317 /* Register HOOK to be called with DATA on each duplicated edge.  */
318 struct cgraph_2edge_hook_list *
319 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
320 {
321   struct cgraph_2edge_hook_list *entry;
322   struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
323 
324   entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
325   entry->hook = hook;
326   entry->data = data;
327   entry->next = NULL;
328   while (*ptr)
329     ptr = &(*ptr)->next;
330   *ptr = entry;
331   return entry;
332 }
333 
334 /* Remove ENTRY from the list of hooks called on duplicating edges.  */
335 void
336 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
337 {
338   struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
339 
340   while (*ptr != entry)
341     ptr = &(*ptr)->next;
342   *ptr = entry->next;
343   free (entry);
344 }
345 
346 /* Call all edge duplication hooks.  */
347 static void
348 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
349 				    struct cgraph_edge *cs2)
350 {
351   struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
352   while (entry)
353   {
354     entry->hook (cs1, cs2, entry->data);
355     entry = entry->next;
356   }
357 }
358 
359 /* Register HOOK to be called with DATA on each duplicated node.  */
360 struct cgraph_2node_hook_list *
361 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
362 {
363   struct cgraph_2node_hook_list *entry;
364   struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
365 
366   entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
367   entry->hook = hook;
368   entry->data = data;
369   entry->next = NULL;
370   while (*ptr)
371     ptr = &(*ptr)->next;
372   *ptr = entry;
373   return entry;
374 }
375 
376 /* Remove ENTRY from the list of hooks called on duplicating nodes.  */
377 void
378 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
379 {
380   struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
381 
382   while (*ptr != entry)
383     ptr = &(*ptr)->next;
384   *ptr = entry->next;
385   free (entry);
386 }
387 
388 /* Call all node duplication hooks.  */
389 static void
390 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
391 				    struct cgraph_node *node2)
392 {
393   struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
394   while (entry)
395   {
396     entry->hook (node1, node2, entry->data);
397     entry = entry->next;
398   }
399 }
400 
401 /* Returns a hash code for P.  */
402 
403 static hashval_t
404 hash_node (const void *p)
405 {
406   const struct cgraph_node *n = (const struct cgraph_node *) p;
407   return (hashval_t) DECL_UID (n->decl);
408 }
409 
410 
411 /* Returns nonzero if P1 and P2 are equal.  */
412 
413 static int
414 eq_node (const void *p1, const void *p2)
415 {
416   const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
417   const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
418   return DECL_UID (n1->decl) == DECL_UID (n2->decl);
419 }
420 
421 /* Allocate new callgraph node.  */
422 
423 static inline struct cgraph_node *
424 cgraph_allocate_node (void)
425 {
426   struct cgraph_node *node;
427 
428   if (free_nodes)
429     {
430       node = free_nodes;
431       free_nodes = NEXT_FREE_NODE (node);
432     }
433   else
434     {
435       node = GGC_CNEW (struct cgraph_node);
436       node->uid = cgraph_max_uid++;
437     }
438 
439   return node;
440 }
441 
442 /* Allocate new callgraph node and insert it into basic data structures.  */
443 
444 static struct cgraph_node *
445 cgraph_create_node (void)
446 {
447   struct cgraph_node *node = cgraph_allocate_node ();
448 
449   node->next = cgraph_nodes;
450   node->pid = -1;
451   node->order = cgraph_order++;
452   if (cgraph_nodes)
453     cgraph_nodes->previous = node;
454   node->previous = NULL;
455   node->global.estimated_growth = INT_MIN;
456   cgraph_nodes = node;
457   cgraph_n_nodes++;
458   return node;
459 }
460 
461 /* Return cgraph node assigned to DECL.  Create new one when needed.  */
462 
463 struct cgraph_node *
464 cgraph_node (tree decl)
465 {
466   struct cgraph_node key, *node, **slot;
467 
468   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
469 
470   if (!cgraph_hash)
471     cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
472 
473   key.decl = decl;
474 
475   slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
476 
477   if (*slot)
478     {
479       node = *slot;
480       if (node->same_body_alias)
481 	node = node->same_body;
482       return node;
483     }
484 
485   node = cgraph_create_node ();
486   node->decl = decl;
487   *slot = node;
488   if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
489     {
490       node->origin = cgraph_node (DECL_CONTEXT (decl));
491       node->next_nested = node->origin->nested;
492       node->origin->nested = node;
493     }
494   if (assembler_name_hash)
495     {
496       void **aslot;
497       tree name = DECL_ASSEMBLER_NAME (decl);
498 
499       aslot = htab_find_slot_with_hash (assembler_name_hash, name,
500 					decl_assembler_name_hash (name),
501 					INSERT);
502       /* We can have multiple declarations with the same assembler name.  For
503 	 C++ it is __builtin_strlen and strlen, for instance.  Do we need to
504 	 record them all?  The original implementation marked just the first
505 	 one, so let's hope for the best.  */
506       if (*aslot == NULL)
507 	*aslot = node;
508     }
509   return node;
510 }
511 
512 /* Mark ALIAS as an alias to DECL.  */
513 
514 static struct cgraph_node *
515 cgraph_same_body_alias_1 (tree alias, tree decl)
516 {
517   struct cgraph_node key, *alias_node, *decl_node, **slot;
518 
519   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
520   gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
521   decl_node = cgraph_node (decl);
522 
523   key.decl = alias;
524 
525   slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
526 
527   /* If the cgraph_node has already been created, fail.  */
528   if (*slot)
529     return NULL;
530 
531   alias_node = cgraph_allocate_node ();
532   alias_node->decl = alias;
533   alias_node->same_body_alias = 1;
534   alias_node->same_body = decl_node;
535   alias_node->previous = NULL;
536   if (decl_node->same_body)
537     decl_node->same_body->previous = alias_node;
538   alias_node->next = decl_node->same_body;
539   alias_node->thunk.alias = decl;
540   decl_node->same_body = alias_node;
541   *slot = alias_node;
542   return alias_node;
543 }
544 
545 /* Attempt to mark ALIAS as an alias to DECL.  Return TRUE if successful.
546    Same body aliases are output whenever the body of DECL is output,
547    and cgraph_node (ALIAS) transparently returns cgraph_node (DECL).   */
548 
549 bool
550 cgraph_same_body_alias (tree alias, tree decl)
551 {
552 #ifndef ASM_OUTPUT_DEF
553   /* If aliases aren't supported by the assembler, fail.  */
554   return false;
555 #endif
556 
557   /*gcc_assert (!assembler_name_hash);*/
558 
559   return cgraph_same_body_alias_1 (alias, decl) != NULL;
560 }
561 
562 void
563 cgraph_add_thunk (tree alias, tree decl, bool this_adjusting,
564 		  HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
565 		  tree virtual_offset,
566 		  tree real_alias)
567 {
568   struct cgraph_node *node = cgraph_get_node (alias);
569 
570   if (node)
571     {
572       gcc_assert (node->local.finalized);
573       gcc_assert (!node->same_body);
574       cgraph_remove_node (node);
575     }
576 
577   node = cgraph_same_body_alias_1 (alias, decl);
578   gcc_assert (node);
579 #ifdef ENABLE_CHECKING
580   gcc_assert (!virtual_offset
581   	      || tree_int_cst_equal (virtual_offset, size_int (virtual_value)));
582 #endif
583   node->thunk.fixed_offset = fixed_offset;
584   node->thunk.this_adjusting = this_adjusting;
585   node->thunk.virtual_value = virtual_value;
586   node->thunk.virtual_offset_p = virtual_offset != NULL;
587   node->thunk.alias = real_alias;
588   node->thunk.thunk_p = true;
589 }
590 
591 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
592    is assigned.  */
593 
594 struct cgraph_node *
595 cgraph_get_node (tree decl)
596 {
597   struct cgraph_node key, *node = NULL, **slot;
598 
599   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
600 
601   if (!cgraph_hash)
602     return NULL;
603 
604   key.decl = decl;
605 
606   slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
607 						 NO_INSERT);
608 
609   if (slot && *slot)
610     {
611       node = *slot;
612       if (node->same_body_alias)
613 	node = node->same_body;
614     }
615   return node;
616 }
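
/* A minimal sketch of the lookup contract, assuming FNDECL is some
   FUNCTION_DECL: cgraph_node creates the node on demand, while
   cgraph_get_node only reports what already exists.

     struct cgraph_node *existing = cgraph_get_node (fndecl);  (may be NULL)
     struct cgraph_node *node = cgraph_node (fndecl);          (never NULL)
     gcc_assert (existing == NULL || existing == node);  */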
617 
618 /* Insert already constructed node into hashtable.  */
619 
620 void
621 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
622 {
623   struct cgraph_node **slot;
624 
625   slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
626 
627   gcc_assert (!*slot);
628   *slot = node;
629 }
630 
631 /* Returns a hash code for P.  */
632 
633 static hashval_t
634 hash_node_by_assembler_name (const void *p)
635 {
636   const struct cgraph_node *n = (const struct cgraph_node *) p;
637   return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
638 }
639 
640 /* Returns nonzero if P1 and P2 are equal.  */
641 
642 static int
643 eq_assembler_name (const void *p1, const void *p2)
644 {
645   const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
646   const_tree name = (const_tree)p2;
647   return (decl_assembler_name_equal (n1->decl, name));
648 }
649 
650 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
651    Return NULL if there's no such node.  */
652 
653 struct cgraph_node *
654 cgraph_node_for_asm (tree asmname)
655 {
656   struct cgraph_node *node;
657   void **slot;
658 
659   if (!assembler_name_hash)
660     {
661       assembler_name_hash =
662 	htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
663 			 NULL);
664       for (node = cgraph_nodes; node; node = node->next)
665         if (!node->global.inlined_to)
666 	  {
667 	    tree name = DECL_ASSEMBLER_NAME (node->decl);
668 	    slot = htab_find_slot_with_hash (assembler_name_hash, name,
669 					     decl_assembler_name_hash (name),
670 					     INSERT);
671 	    /* We can have multiple declarations with the same assembler name.
672 	       For C++ it is __builtin_strlen and strlen, for instance.  Do we
673 	       need to record them all?  The original implementation marked just
674 	       the first one, so let's hope for the best.  */
675 	    if (!*slot)
676 	      *slot = node;
677 	    if (node->same_body)
678 	      {
679 		struct cgraph_node *alias;
680 
681 		for (alias = node->same_body; alias; alias = alias->next)
682 		  {
683 		    hashval_t hash;
684 		    name = DECL_ASSEMBLER_NAME (alias->decl);
685 		    hash = decl_assembler_name_hash (name);
686 		    slot = htab_find_slot_with_hash (assembler_name_hash, name,
687 						     hash,  INSERT);
688 		    if (!*slot)
689 		      *slot = alias;
690 		  }
691 	      }
692 	  }
693     }
694 
695   slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
696 				   decl_assembler_name_hash (asmname),
697 				   NO_INSERT);
698 
699   if (slot)
700     {
701       node = (struct cgraph_node *) *slot;
702       if (node->same_body_alias)
703 	node = node->same_body;
704       return node;
705     }
706   return NULL;
707 }
708 
709 /* Returns a hash value for X (which really is a cgraph_edge), based on its
710    call statement.  */
710 
711 static hashval_t
712 edge_hash (const void *x)
713 {
714   return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
715 }
716 
717 /* Return nonzero if the call_stmt of cgraph_edge X is the statement Y.  */
718 
719 static int
720 edge_eq (const void *x, const void *y)
721 {
722   return ((const struct cgraph_edge *) x)->call_stmt == y;
723 }
724 
725 
726 /* Return the callgraph edge representing the GIMPLE_CALL statement
727    CALL_STMT.  */
728 
729 struct cgraph_edge *
730 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
731 {
732   struct cgraph_edge *e, *e2;
733   int n = 0;
734 
735   if (node->call_site_hash)
736     return (struct cgraph_edge *)
737       htab_find_with_hash (node->call_site_hash, call_stmt,
738       	                   htab_hash_pointer (call_stmt));
739 
740   /* This loop may turn out to be a performance problem.  In that case,
741      adding hash tables to call nodes with very many edges is probably the
742      best solution.  It is not a good idea to add a pointer into the
743      CALL_EXPR itself, because we want to allow multiple cgraph nodes
744      representing different clones of the same body before it is cloned.  */
745   for (e = node->callees; e; e= e->next_callee)
746     {
747       if (e->call_stmt == call_stmt)
748 	break;
749       n++;
750     }
751 
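  /* Past 100 callee edges, build the lazy call-site hash so that later
     lookups on this node take constant time instead of another linear
     scan.  */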
752   if (n > 100)
753     {
754       node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
755       for (e2 = node->callees; e2; e2 = e2->next_callee)
756 	{
757           void **slot;
758 	  slot = htab_find_slot_with_hash (node->call_site_hash,
759 					   e2->call_stmt,
760 					   htab_hash_pointer (e2->call_stmt),
761 					   INSERT);
762 	  gcc_assert (!*slot);
763 	  *slot = e2;
764 	}
765     }
766 
767   return e;
768 }
769 
770 
771 /* Change field call_stmt of edge E to NEW_STMT.  */
772 
773 void
774 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
775 {
776   if (e->caller->call_site_hash)
777     {
778       htab_remove_elt_with_hash (e->caller->call_site_hash,
779 				 e->call_stmt,
780 				 htab_hash_pointer (e->call_stmt));
781     }
782   e->call_stmt = new_stmt;
783   push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
784   e->can_throw_external = stmt_can_throw_external (new_stmt);
785   pop_cfun ();
786   if (e->caller->call_site_hash)
787     {
788       void **slot;
789       slot = htab_find_slot_with_hash (e->caller->call_site_hash,
790 				       e->call_stmt,
791 				       htab_hash_pointer
792 				       (e->call_stmt), INSERT);
793       gcc_assert (!*slot);
794       *slot = e;
795     }
796 }
797 
798 /* Like cgraph_set_call_stmt but walk the clone tree and update all
799    clones sharing the same function body.  */
800 
801 void
802 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
803 				       gimple old_stmt, gimple new_stmt)
804 {
805   struct cgraph_node *node;
806   struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
807 
808   if (edge)
809     cgraph_set_call_stmt (edge, new_stmt);
810 
811   node = orig->clones;
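  /* Walk the clone tree without recursion: descend into clones first, then
     move to the next sibling clone, and climb back up via clone_of once a
     subtree is exhausted.  The same walk is used in several places below.  */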
812   if (node)
813     while (node != orig)
814       {
815 	struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
816 	if (edge)
817 	  cgraph_set_call_stmt (edge, new_stmt);
818 	if (node->clones)
819 	  node = node->clones;
820 	else if (node->next_sibling_clone)
821 	  node = node->next_sibling_clone;
822 	else
823 	  {
824 	    while (node != orig && !node->next_sibling_clone)
825 	      node = node->clone_of;
826 	    if (node != orig)
827 	      node = node->next_sibling_clone;
828 	  }
829       }
830 }
831 
832 /* Like cgraph_create_edge, but walk the clone tree and update all clones
833    sharing the same function body.  If a clone already has an edge for
834    OLD_STMT, only update it the same way cgraph_set_call_stmt_including_clones does.
835 
836    TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
837    frequencies of the clones.  */
838 
839 void
840 cgraph_create_edge_including_clones (struct cgraph_node *orig,
841 				     struct cgraph_node *callee,
842 				     gimple old_stmt,
843 				     gimple stmt, gcov_type count,
844 				     int freq, int loop_depth,
845 				     cgraph_inline_failed_t reason)
846 {
847   struct cgraph_node *node;
848   struct cgraph_edge *edge;
849 
850   if (!cgraph_edge (orig, stmt))
851     {
852       edge = cgraph_create_edge (orig, callee, stmt, count, freq, loop_depth);
853       edge->inline_failed = reason;
854     }
855 
856   node = orig->clones;
857   if (node)
858     while (node != orig)
859       {
860 	struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
861 
862         /* It is possible that clones already contain the edge while the
863 	   master didn't.  Either we promoted an indirect call into a direct
864 	   call in the clone, or we are processing clones of an unreachable
865 	   master whose edges have been removed.  */
866 	if (edge)
867 	  cgraph_set_call_stmt (edge, stmt);
868 	else if (!cgraph_edge (node, stmt))
869 	  {
870 	    edge = cgraph_create_edge (node, callee, stmt, count,
871 				       freq, loop_depth);
872 	    edge->inline_failed = reason;
873 	  }
874 
875 	if (node->clones)
876 	  node = node->clones;
877 	else if (node->next_sibling_clone)
878 	  node = node->next_sibling_clone;
879 	else
880 	  {
881 	    while (node != orig && !node->next_sibling_clone)
882 	      node = node->clone_of;
883 	    if (node != orig)
884 	      node = node->next_sibling_clone;
885 	  }
886       }
887 }
888 
889 /* Give the initial reason why inlining would fail on edge E.  This gets
890    either nullified or usually overwritten by a more precise reason later.  */
891 
892 static void
893 initialize_inline_failed (struct cgraph_edge *e)
894 {
895   struct cgraph_node *callee = e->callee;
896 
897   if (!callee->analyzed)
898     e->inline_failed = CIF_BODY_NOT_AVAILABLE;
899   else if (callee->local.redefined_extern_inline)
900     e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
901   else if (!callee->local.inlinable)
902     e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
903   else if (e->call_stmt && gimple_call_cannot_inline_p (e->call_stmt))
904     e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
905   else
906     e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
907 }
908 
909 /* Create edge from CALLER to CALLEE in the cgraph.  */
910 
911 struct cgraph_edge *
912 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
913 		    gimple call_stmt, gcov_type count, int freq, int nest)
914 {
915   struct cgraph_edge *edge;
916 
917 
918   /* LTO does not actually have access to the call_stmt since these
919      have not been loaded yet.  */
920   if (call_stmt)
921     {
922 #ifdef ENABLE_CHECKING
923       /* This is a rather expensive check, possibly triggering construction
924 	 of the call stmt hashtable.  */
925       gcc_assert (!cgraph_edge (caller, call_stmt));
926 #endif
927 
928       gcc_assert (is_gimple_call (call_stmt));
929     }
930 
931   if (free_edges)
932     {
933       edge = free_edges;
934       free_edges = NEXT_FREE_EDGE (edge);
935     }
936   else
937     {
938       edge = GGC_NEW (struct cgraph_edge);
939       edge->uid = cgraph_edge_max_uid++;
940     }
941 
942   edge->aux = NULL;
943 
944   edge->caller = caller;
945   edge->callee = callee;
946   edge->call_stmt = call_stmt;
947   push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
948   edge->can_throw_external
949     = call_stmt ? stmt_can_throw_external (call_stmt) : false;
950   pop_cfun ();
951   edge->prev_caller = NULL;
952   edge->next_caller = callee->callers;
953   if (callee->callers)
954     callee->callers->prev_caller = edge;
955   edge->prev_callee = NULL;
956   edge->next_callee = caller->callees;
957   if (caller->callees)
958     caller->callees->prev_callee = edge;
959   caller->callees = edge;
960   callee->callers = edge;
961   edge->count = count;
962   gcc_assert (count >= 0);
963   edge->frequency = freq;
964   gcc_assert (freq >= 0);
965   gcc_assert (freq <= CGRAPH_FREQ_MAX);
966   edge->loop_nest = nest;
967   edge->indirect_call = 0;
968   edge->call_stmt_cannot_inline_p =
969     (call_stmt ? gimple_call_cannot_inline_p (call_stmt) : false);
970   if (call_stmt && caller->call_site_hash)
971     {
972       void **slot;
973       slot = htab_find_slot_with_hash (caller->call_site_hash,
974 				       edge->call_stmt,
975 				       htab_hash_pointer
976 					 (edge->call_stmt),
977 				       INSERT);
978       gcc_assert (!*slot);
979       *slot = edge;
980     }
981 
982   initialize_inline_failed (edge);
983 
984   return edge;
985 }
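
/* A minimal usage sketch, assuming STMT is a GIMPLE_CALL found in the body
   of CALLER whose callee FNDECL is known, with profile data taken from the
   containing basic block BB (as done elsewhere in this file):

     struct cgraph_edge *e
       = cgraph_create_edge (caller, cgraph_node (fndecl), stmt,
			     bb->count,
			     compute_call_stmt_bb_frequency (caller->decl, bb),
			     bb->loop_depth);
     gcc_assert (e->inline_failed);  */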
986 
987 /* Remove the edge E from the list of the callers of the callee.  */
988 
989 static inline void
990 cgraph_edge_remove_callee (struct cgraph_edge *e)
991 {
992   if (e->prev_caller)
993     e->prev_caller->next_caller = e->next_caller;
994   if (e->next_caller)
995     e->next_caller->prev_caller = e->prev_caller;
996   if (!e->prev_caller)
997     e->callee->callers = e->next_caller;
998 }
999 
1000 /* Remove the edge E from the list of the callees of the caller.  */
1001 
1002 static inline void
1003 cgraph_edge_remove_caller (struct cgraph_edge *e)
1004 {
1005   if (e->prev_callee)
1006     e->prev_callee->next_callee = e->next_callee;
1007   if (e->next_callee)
1008     e->next_callee->prev_callee = e->prev_callee;
1009   if (!e->prev_callee)
1010     e->caller->callees = e->next_callee;
1011   if (e->caller->call_site_hash)
1012     htab_remove_elt_with_hash (e->caller->call_site_hash,
1013 			       e->call_stmt,
1014 	  		       htab_hash_pointer (e->call_stmt));
1015 }
1016 
1017 /* Put the edge onto the free list.  */
1018 
1019 static void
1020 cgraph_free_edge (struct cgraph_edge *e)
1021 {
1022   int uid = e->uid;
1023 
1024   /* Clear out the edge so we do not dangle pointers.  */
1025   memset (e, 0, sizeof (*e));
1026   e->uid = uid;
1027   NEXT_FREE_EDGE (e) = free_edges;
1028   free_edges = e;
1029 }
1030 
1031 /* Remove the edge E in the cgraph.  */
1032 
1033 void
1034 cgraph_remove_edge (struct cgraph_edge *e)
1035 {
1036   /* Call all edge removal hooks.  */
1037   cgraph_call_edge_removal_hooks (e);
1038 
1039   /* Remove from callers list of the callee.  */
1040   cgraph_edge_remove_callee (e);
1041 
1042   /* Remove from callees list of the callers.  */
1043   cgraph_edge_remove_caller (e);
1044 
1045   /* Put the edge onto the free list.  */
1046   cgraph_free_edge (e);
1047 }
1048 
1049 /* Redirect the callee of E to N.  The function does not update the
1050    underlying call expression.  */
1051 
1052 void
1053 cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1054 {
1055   /* Remove from callers list of the current callee.  */
1056   cgraph_edge_remove_callee (e);
1057 
1058   /* Insert to callers list of the new callee.  */
1059   e->prev_caller = NULL;
1060   if (n->callers)
1061     n->callers->prev_caller = e;
1062   e->next_caller = n->callers;
1063   n->callers = e;
1064   e->callee = n;
1065 }
1066 
1067 
1068 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1069    OLD_STMT changed into NEW_STMT.  OLD_CALL is gimple_call_fndecl
1070    of OLD_STMT if it was previously a call statement.  */
1071 
1072 static void
1073 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1074 					gimple old_stmt, tree old_call, gimple new_stmt)
1075 {
1076   tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fndecl (new_stmt) : 0;
1077 
1078   /* Neither statement is a direct call; there is nothing to update.  */
1079   if (!new_call && !old_call)
1080     return;
1081   /* See if we turned an indirect call into a direct call or folded a call
1082      to one builtin into a different builtin.  */
1083   if (old_call != new_call)
1084     {
1085       struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1086       struct cgraph_edge *ne = NULL;
1087       gcov_type count;
1088       int frequency;
1089       int loop_nest;
1090 
1091       if (e)
1092 	{
1093 	  /* See if the call is already there.  It might be there because
1094 	     indirect inlining already found it.  */
1095 	  if (new_call && e->callee->decl == new_call)
1096 	    return;
1097 
1098 	  /* Otherwise remove the edge and create a new one; we can't simply
1099 	     redirect it since the function has changed, so the inline plan and
1100 	     other information attached to the edge are invalid.  */
1101 	  count = e->count;
1102 	  frequency = e->frequency;
1103 	  loop_nest = e->loop_nest;
1104 	  cgraph_remove_edge (e);
1105 	}
1106       else
1107 	{
1108 	  /* We are seeing a new direct call; compute profile info from its BB.  */
1109 	  basic_block bb = gimple_bb (new_stmt);
1110 	  count = bb->count;
1111 	  frequency = compute_call_stmt_bb_frequency (current_function_decl,
1112 						      bb);
1113 	  loop_nest = bb->loop_depth;
1114 	}
1115 
1116       if (new_call)
1117 	{
1118 	  ne = cgraph_create_edge (node, cgraph_node (new_call),
1119 				   new_stmt, count, frequency,
1120 				   loop_nest);
1121 	  gcc_assert (ne->inline_failed);
1122 	}
1123     }
1124   /* We only updated the call stmt; update the pointer in the cgraph edge.  */
1125   else if (old_stmt != new_stmt)
1126     cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1127 }
1128 
1129 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1130    OLD_STMT changed into NEW_STMT.  OLD_DECL is gimple_call_fndecl
1131    of OLD_STMT before it was updated (updating can happen in place).  */
1132 
1133 void
1134 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1135 {
1136   struct cgraph_node *orig = cgraph_node (cfun->decl);
1137   struct cgraph_node *node;
1138 
1139   cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1140   if (orig->clones)
1141     for (node = orig->clones; node != orig;)
1142       {
1143         cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1144 	if (node->clones)
1145 	  node = node->clones;
1146 	else if (node->next_sibling_clone)
1147 	  node = node->next_sibling_clone;
1148 	else
1149 	  {
1150 	    while (node != orig && !node->next_sibling_clone)
1151 	      node = node->clone_of;
1152 	    if (node != orig)
1153 	      node = node->next_sibling_clone;
1154 	  }
1155       }
1156 }
1157 
1158 
1159 /* Remove all callees from the node.  */
1160 
1161 void
1162 cgraph_node_remove_callees (struct cgraph_node *node)
1163 {
1164   struct cgraph_edge *e, *f;
1165 
1166   /* It is sufficient to remove the edges from the lists of callers of
1167      the callees.  The callee list of the node can be zapped with one
1168      assignment.  */
1169   for (e = node->callees; e; e = f)
1170     {
1171       f = e->next_callee;
1172       cgraph_call_edge_removal_hooks (e);
1173       cgraph_edge_remove_callee (e);
1174       cgraph_free_edge (e);
1175     }
1176   node->callees = NULL;
1177   if (node->call_site_hash)
1178     {
1179       htab_delete (node->call_site_hash);
1180       node->call_site_hash = NULL;
1181     }
1182 }
1183 
1184 /* Remove all callers from the node.  */
1185 
1186 static void
1187 cgraph_node_remove_callers (struct cgraph_node *node)
1188 {
1189   struct cgraph_edge *e, *f;
1190 
1191   /* It is sufficient to remove the edges from the lists of callees of
1192      the callers.  The caller list of the node can be zapped with one
1193      assignment.  */
1194   for (e = node->callers; e; e = f)
1195     {
1196       f = e->next_caller;
1197       cgraph_call_edge_removal_hooks (e);
1198       cgraph_edge_remove_caller (e);
1199       cgraph_free_edge (e);
1200     }
1201   node->callers = NULL;
1202 }
1203 
1204 /* Release memory used to represent body of function NODE.  */
1205 
1206 void
1207 cgraph_release_function_body (struct cgraph_node *node)
1208 {
1209   if (DECL_STRUCT_FUNCTION (node->decl))
1210     {
1211       tree old_decl = current_function_decl;
1212       push_cfun (DECL_STRUCT_FUNCTION (node->decl));
1213       if (cfun->gimple_df)
1214 	{
1215 	  current_function_decl = node->decl;
1216 	  delete_tree_ssa ();
1217 	  delete_tree_cfg_annotations ();
1218 	  cfun->eh = NULL;
1219 	  current_function_decl = old_decl;
1220 	}
1221       if (cfun->cfg)
1222 	{
1223 	  gcc_assert (dom_computed[0] == DOM_NONE);
1224 	  gcc_assert (dom_computed[1] == DOM_NONE);
1225 	  clear_edges ();
1226 	}
1227       if (cfun->value_histograms)
1228 	free_histograms ();
1229       gcc_assert (!current_loops);
1230       pop_cfun();
1231       gimple_set_body (node->decl, NULL);
1232       VEC_free (ipa_opt_pass, heap,
1233       		node->ipa_transforms_to_apply);
1234       /* The struct function hangs a lot of data off it that would leak if
1235          we didn't remove all pointers to it.  */
1236       ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1237       DECL_STRUCT_FUNCTION (node->decl) = NULL;
1238     }
1239   DECL_SAVED_TREE (node->decl) = NULL;
1240   /* If the node is abstract and needed, then do not clear DECL_INITIAL
1241      of its associated function declaration because it's needed to
1242      emit debug info later.  */
1243   if (!node->abstract_and_needed)
1244     DECL_INITIAL (node->decl) = error_mark_node;
1245 }
1246 
1247 /* Remove same body alias node.  */
1248 
1249 void
1250 cgraph_remove_same_body_alias (struct cgraph_node *node)
1251 {
1252   void **slot;
1253   int uid = node->uid;
1254 
1255   gcc_assert (node->same_body_alias);
1256   if (node->previous)
1257     node->previous->next = node->next;
1258   else
1259     node->same_body->same_body = node->next;
1260   if (node->next)
1261     node->next->previous = node->previous;
1262   node->next = NULL;
1263   node->previous = NULL;
1264   slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1265   if (*slot == node)
1266     htab_clear_slot (cgraph_hash, slot);
1267   if (assembler_name_hash)
1268     {
1269       tree name = DECL_ASSEMBLER_NAME (node->decl);
1270       slot = htab_find_slot_with_hash (assembler_name_hash, name,
1271 				       decl_assembler_name_hash (name),
1272 				       NO_INSERT);
1273       if (slot && *slot == node)
1274 	htab_clear_slot (assembler_name_hash, slot);
1275     }
1276 
1277   /* Clear out the node to NULL all pointers and add the node to the free
1278      list.  */
1279   memset (node, 0, sizeof(*node));
1280   node->uid = uid;
1281   NEXT_FREE_NODE (node) = free_nodes;
1282   free_nodes = node;
1283 }
1284 
1285 /* Remove the node from cgraph.  */
1286 
1287 void
1288 cgraph_remove_node (struct cgraph_node *node)
1289 {
1290   void **slot;
1291   bool kill_body = false;
1292   struct cgraph_node *n;
1293   int uid = node->uid;
1294 
1295   cgraph_call_node_removal_hooks (node);
1296   cgraph_node_remove_callers (node);
1297   cgraph_node_remove_callees (node);
1298   VEC_free (ipa_opt_pass, heap,
1299             node->ipa_transforms_to_apply);
1300 
1301   /* Incremental inlining accesses removed nodes stored in the postorder
1302      list.  */
1303   node->needed = node->reachable = false;
1304   for (n = node->nested; n; n = n->next_nested)
1305     n->origin = NULL;
1306   node->nested = NULL;
1307   if (node->origin)
1308     {
1309       struct cgraph_node **node2 = &node->origin->nested;
1310 
1311       while (*node2 != node)
1312 	node2 = &(*node2)->next_nested;
1313       *node2 = node->next_nested;
1314     }
1315   if (node->previous)
1316     node->previous->next = node->next;
1317   else
1318     cgraph_nodes = node->next;
1319   if (node->next)
1320     node->next->previous = node->previous;
1321   node->next = NULL;
1322   node->previous = NULL;
1323   slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1324   if (*slot == node)
1325     {
1326       struct cgraph_node *next_inline_clone;
1327 
1328       for (next_inline_clone = node->clones;
1329       	   next_inline_clone && next_inline_clone->decl != node->decl;
1330 	   next_inline_clone = next_inline_clone->next_sibling_clone)
1331 	;
1332 
1333       /* If there is an inline clone of the node being removed, we need
1334          to put it into the position of the removed node and reorganize
1335 	 all other clones to be based on it.  */
1336       if (next_inline_clone)
1337 	{
1338 	  struct cgraph_node *n;
1339 	  struct cgraph_node *new_clones;
1340 
1341 	  *slot = next_inline_clone;
1342 
1343 	  /* Unlink inline clone from the list of clones of removed node.  */
1344 	  if (next_inline_clone->next_sibling_clone)
1345 	    next_inline_clone->next_sibling_clone->prev_sibling_clone
1346 	      = next_inline_clone->prev_sibling_clone;
1347 	  if (next_inline_clone->prev_sibling_clone)
1348 	    {
1349 	      gcc_assert (node->clones != next_inline_clone);
1350 	      next_inline_clone->prev_sibling_clone->next_sibling_clone
1351 	        = next_inline_clone->next_sibling_clone;
1352 	    }
1353 	  else
1354 	    {
1355 	      gcc_assert (node->clones == next_inline_clone);
1356 	      node->clones = next_inline_clone->next_sibling_clone;
1357 	    }
1358 
1359 	  new_clones = node->clones;
1360 	  node->clones = NULL;
1361 
1362 	  /* Copy clone info.  */
1363 	  next_inline_clone->clone = node->clone;
1364 
1365 	  /* Now place it into the clone tree at the same level as NODE.  */
1366 	  next_inline_clone->clone_of = node->clone_of;
1367 	  next_inline_clone->prev_sibling_clone = NULL;
1368 	  next_inline_clone->next_sibling_clone = NULL;
1369 	  if (node->clone_of)
1370 	    {
1371 	      if (node->clone_of->clones)
1372 	        node->clone_of->clones->prev_sibling_clone = next_inline_clone;
1373 	      next_inline_clone->next_sibling_clone = node->clone_of->clones;
1374 	      node->clone_of->clones = next_inline_clone;
1375 	    }
1376 
1377 	  /* Merge the clone list.  */
1378 	  if (new_clones)
1379 	    {
1380 	      if (!next_inline_clone->clones)
1381 		next_inline_clone->clones = new_clones;
1382 	      else
1383 		{
1384 		  n = next_inline_clone->clones;
1385 		  while (n->next_sibling_clone)
1386 		    n =  n->next_sibling_clone;
1387 		  n->next_sibling_clone = new_clones;
1388 		  new_clones->prev_sibling_clone = n;
1389 		}
1390 	    }
1391 
1392 	  /* Update clone_of pointers.  */
1393 	  n = new_clones;
1394 	  while (n)
1395 	    {
1396 	      n->clone_of = next_inline_clone;
1397 	      n = n->next_sibling_clone;
1398 	    }
1399 	}
1400       else
1401 	{
1402 	  htab_clear_slot (cgraph_hash, slot);
1403 	  kill_body = true;
1404 	}
1405 
1406     }
1407   if (node->prev_sibling_clone)
1408     node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1409   else if (node->clone_of)
1410     node->clone_of->clones = node->next_sibling_clone;
1411   if (node->next_sibling_clone)
1412     node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1413   if (node->clones)
1414     {
1415       struct cgraph_node *n, *next;
1416 
1417       if (node->clone_of)
1418         {
1419 	  for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1420 	    n->clone_of = node->clone_of;
1421 	  n->clone_of = node->clone_of;
1422 	  n->next_sibling_clone = node->clone_of->clones;
1423 	  if (node->clone_of->clones)
1424 	    node->clone_of->clones->prev_sibling_clone = n;
1425 	  node->clone_of->clones = node->clones;
1426 	}
1427       else
1428         {
1429 	  /* We are removing a node with clones.  This makes the clones
1430 	     inconsistent, but assume they will be removed subsequently and just
1431 	     keep the clone tree intact.  This can happen during unreachable
1432 	     function removal, since we remove unreachable functions in random
1433 	     order, not by a bottom-up walk of clone trees.  */
1434 	  for (n = node->clones; n; n = next)
1435 	    {
1436 	       next = n->next_sibling_clone;
1437 	       n->next_sibling_clone = NULL;
1438 	       n->prev_sibling_clone = NULL;
1439 	       n->clone_of = NULL;
1440 	    }
1441 	}
1442     }
1443 
1444   while (node->same_body)
1445     cgraph_remove_same_body_alias (node->same_body);
1446 
1447   if (node->same_comdat_group)
1448     {
1449       struct cgraph_node *prev;
1450       for (prev = node->same_comdat_group;
1451 	   prev->same_comdat_group != node;
1452 	   prev = prev->same_comdat_group)
1453 	;
1454       if (node->same_comdat_group == prev)
1455 	prev->same_comdat_group = NULL;
1456       else
1457 	prev->same_comdat_group = node->same_comdat_group;
1458       node->same_comdat_group = NULL;
1459     }
1460 
1461   /* While all the clones are removed after being processed, the function
1462      itself is kept in the cgraph even after it is compiled.  Check whether
1463      we are done with this body and reclaim it proactively if this is the
1464      case.  */
1465   if (!kill_body && *slot)
1466     {
1467       struct cgraph_node *n = (struct cgraph_node *) *slot;
1468       if (!n->clones && !n->clone_of && !n->global.inlined_to
1469 	  && (cgraph_global_info_ready
1470 	      && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl))))
1471 	kill_body = true;
1472     }
1473   if (assembler_name_hash)
1474     {
1475       tree name = DECL_ASSEMBLER_NAME (node->decl);
1476       slot = htab_find_slot_with_hash (assembler_name_hash, name,
1477 				       decl_assembler_name_hash (name),
1478 				       NO_INSERT);
1479       /* Inline clones are not hashed.  */
1480       if (slot && *slot == node)
1481         htab_clear_slot (assembler_name_hash, slot);
1482     }
1483 
1484   if (kill_body)
1485     cgraph_release_function_body (node);
1486   node->decl = NULL;
1487   if (node->call_site_hash)
1488     {
1489       htab_delete (node->call_site_hash);
1490       node->call_site_hash = NULL;
1491     }
1492   cgraph_n_nodes--;
1493 
1494   /* Clear out the node to NULL all pointers and add the node to the free
1495      list.  */
1496   memset (node, 0, sizeof(*node));
1497   node->uid = uid;
1498   NEXT_FREE_NODE (node) = free_nodes;
1499   free_nodes = node;
1500 }
1501 
1502 /* Remove the node from the cgraph and, recursively, everything inlined
     into it.  */
1503 
1504 void
1505 cgraph_remove_node_and_inline_clones (struct cgraph_node *node)
1506 {
1507   struct cgraph_edge *e, *next;
1508   for (e = node->callees; e; e = next)
1509     {
1510       next = e->next_callee;
1511       if (!e->inline_failed)
1512         cgraph_remove_node_and_inline_clones (e->callee);
1513     }
1514   cgraph_remove_node (node);
1515 }
1516 
1517 /* Notify finalize_compilation_unit that given node is reachable.  */
1518 
1519 void
1520 cgraph_mark_reachable_node (struct cgraph_node *node)
1521 {
1522   if (!node->reachable && node->local.finalized)
1523     {
1524       notice_global_symbol (node->decl);
1525       node->reachable = 1;
1526       gcc_assert (!cgraph_global_info_ready);
1527 
1528       node->next_needed = cgraph_nodes_queue;
1529       cgraph_nodes_queue = node;
1530     }
1531 }
1532 
1533 /* Likewise indicate that a node is needed, i.e. reachable via some
1534    external means.  */
1535 
1536 void
1537 cgraph_mark_needed_node (struct cgraph_node *node)
1538 {
1539   node->needed = 1;
1540   gcc_assert (!node->global.inlined_to);
1541   cgraph_mark_reachable_node (node);
1542 }
1543 
1544 /* Likewise indicate that a node has its address taken.  */
1545 
1546 void
1547 cgraph_mark_address_taken_node (struct cgraph_node *node)
1548 {
1549   node->address_taken = 1;
1550   cgraph_mark_needed_node (node);
1551 }
1552 
1553 /* Return local info for the compiled function.  */
1554 
1555 struct cgraph_local_info *
1556 cgraph_local_info (tree decl)
1557 {
1558   struct cgraph_node *node;
1559 
1560   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1561   node = cgraph_node (decl);
1562   return &node->local;
1563 }
1564 
1565 /* Return global info for the compiled function.  */
1566 
1567 struct cgraph_global_info *
1568 cgraph_global_info (tree decl)
1569 {
1570   struct cgraph_node *node;
1571 
1572   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1573   node = cgraph_node (decl);
1574   return &node->global;
1575 }
1576 
1577 /* Return RTL info for the compiled function.  */
1578 
1579 struct cgraph_rtl_info *
1580 cgraph_rtl_info (tree decl)
1581 {
1582   struct cgraph_node *node;
1583 
1584   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1585   node = cgraph_node (decl);
1586   if (decl != current_function_decl
1587       && !TREE_ASM_WRITTEN (node->decl))
1588     return NULL;
1589   return &node->rtl;
1590 }
1591 
1592 /* Return a string describing the failure REASON.  */
1593 
1594 const char*
1595 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1596 {
1597 #undef DEFCIFCODE
1598 #define DEFCIFCODE(code, string)	string,
1599 
1600   static const char *cif_string_table[CIF_N_REASONS] = {
1601 #include "cif-code.def"
1602   };
1603 
1604   /* Signedness of an enum type is implementation defined, so cast it
1605      to unsigned before testing. */
1606   gcc_assert ((unsigned) reason < CIF_N_REASONS);
1607   return cif_string_table[reason];
1608 }
1609 
1610 /* Return name of the node used in debug output.  */
1611 const char *
1612 cgraph_node_name (struct cgraph_node *node)
1613 {
1614   return lang_hooks.decl_printable_name (node->decl, 2);
1615 }
1616 
1617 /* Names used to print out the availability enum.  */
1618 const char * const cgraph_availability_names[] =
1619   {"unset", "not_available", "overwritable", "available", "local"};
1620 
1621 
1622 /* Dump call graph node NODE to file F.  */
1623 
1624 void
1625 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1626 {
1627   struct cgraph_edge *edge;
1628   fprintf (f, "%s/%i(%i)", cgraph_node_name (node), node->uid,
1629 	   node->pid);
1630   dump_addr (f, " @", (void *)node);
1631   if (node->global.inlined_to)
1632     fprintf (f, " (inline copy in %s/%i)",
1633 	     cgraph_node_name (node->global.inlined_to),
1634 	     node->global.inlined_to->uid);
1635   if (node->clone_of)
1636     fprintf (f, " (clone of %s/%i)",
1637 	     cgraph_node_name (node->clone_of),
1638 	     node->clone_of->uid);
1639   if (cgraph_function_flags_ready)
1640     fprintf (f, " availability:%s",
1641 	     cgraph_availability_names [cgraph_function_body_availability (node)]);
1642   if (node->count)
1643     fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1644 	     (HOST_WIDEST_INT)node->count);
1645   if (node->local.inline_summary.self_time)
1646     fprintf (f, " %i time, %i benefit", node->local.inline_summary.self_time,
1647     					node->local.inline_summary.time_inlining_benefit);
1648   if (node->global.time && node->global.time
1649       != node->local.inline_summary.self_time)
1650     fprintf (f, " (%i after inlining)", node->global.time);
1651   if (node->local.inline_summary.self_size)
1652     fprintf (f, " %i size, %i benefit", node->local.inline_summary.self_size,
1653     					node->local.inline_summary.size_inlining_benefit);
1654   if (node->global.size && node->global.size
1655       != node->local.inline_summary.self_size)
1656     fprintf (f, " (%i after inlining)", node->global.size);
1657   if (node->local.inline_summary.estimated_self_stack_size)
1658     fprintf (f, " %i bytes stack usage", (int)node->local.inline_summary.estimated_self_stack_size);
1659   if (node->global.estimated_stack_size != node->local.inline_summary.estimated_self_stack_size)
1660     fprintf (f, " %i bytes after inlining", (int)node->global.estimated_stack_size);
1661   if (node->origin)
1662     fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1663   if (node->needed)
1664     fprintf (f, " needed");
1665   if (node->address_taken)
1666     fprintf (f, " address_taken");
1667   else if (node->reachable)
1668     fprintf (f, " reachable");
1669   if (gimple_has_body_p (node->decl))
1670     fprintf (f, " body");
1671   if (node->process)
1672     fprintf (f, " process");
1673   if (node->local.local)
1674     fprintf (f, " local");
1675   if (node->local.externally_visible)
1676     fprintf (f, " externally_visible");
1677   if (node->local.finalized)
1678     fprintf (f, " finalized");
1679   if (node->local.disregard_inline_limits)
1680     fprintf (f, " always_inline");
1681   else if (node->local.inlinable)
1682     fprintf (f, " inlinable");
1683   if (node->local.redefined_extern_inline)
1684     fprintf (f, " redefined_extern_inline");
1685   if (TREE_ASM_WRITTEN (node->decl))
1686     fprintf (f, " asm_written");
1687 
1688   fprintf (f, "\n  called by: ");
1689   for (edge = node->callers; edge; edge = edge->next_caller)
1690     {
1691       fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1692 	       edge->caller->uid);
1693       if (edge->count)
1694 	fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1695 		 (HOST_WIDEST_INT)edge->count);
1696       if (edge->frequency)
1697 	fprintf (f, "(%.2f per call) ",
1698 		 edge->frequency / (double)CGRAPH_FREQ_BASE);
1699       if (!edge->inline_failed)
1700 	fprintf(f, "(inlined) ");
1701       if (edge->indirect_call)
1702 	fprintf(f, "(indirect) ");
1703       if (edge->can_throw_external)
1704 	fprintf(f, "(can throw external) ");
1705     }
1706 
1707   fprintf (f, "\n  calls: ");
1708   for (edge = node->callees; edge; edge = edge->next_callee)
1709     {
1710       fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1711 	       edge->callee->uid);
1712       if (!edge->inline_failed)
1713 	fprintf(f, "(inlined) ");
1714       if (edge->indirect_call)
1715 	fprintf(f, "(indirect) ");
1716       if (edge->count)
1717 	fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1718 		 (HOST_WIDEST_INT)edge->count);
1719       if (edge->frequency)
1720 	fprintf (f, "(%.2f per call) ",
1721 		 edge->frequency / (double)CGRAPH_FREQ_BASE);
1722       if (edge->loop_nest)
1723 	fprintf (f, "(nested in %i loops) ", edge->loop_nest);
1724       if (edge->can_throw_external)
1725 	fprintf (f, "(can throw external) ");
1726     }
1727   fprintf (f, "\n");
1728 
1729   if (node->same_body)
1730     {
1731       struct cgraph_node *n;
1732       fprintf (f, "  aliases & thunks:");
1733       for (n = node->same_body; n; n = n->next)
1734         {
1735           fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
1736 	  if (n->thunk.thunk_p)
1737 	    {
1738 	      fprintf (f, " (thunk of %s fixed offset %i virtual value %i has "
1739 		       "virtual offset %i",
1740 	      	       lang_hooks.decl_printable_name (n->thunk.alias, 2),
1741 		       (int)n->thunk.fixed_offset,
1742 		       (int)n->thunk.virtual_value,
1743 		       (int)n->thunk.virtual_offset_p);
1744 	      fprintf (f, ")");
1745 	    }
1746 	}
1747       fprintf (f, "\n");
1748     }
1749 }
1750 
1751 
1752 /* Dump call graph node NODE to stderr.  */
1753 
1754 void
1755 debug_cgraph_node (struct cgraph_node *node)
1756 {
1757   dump_cgraph_node (stderr, node);
1758 }
1759 
1760 
1761 /* Dump the callgraph to file F.  */
1762 
1763 void
1764 dump_cgraph (FILE *f)
1765 {
1766   struct cgraph_node *node;
1767 
1768   fprintf (f, "callgraph:\n\n");
1769   for (node = cgraph_nodes; node; node = node->next)
1770     dump_cgraph_node (f, node);
1771 }
1772 
1773 
1774 /* Dump the call graph to stderr.  */
1775 
1776 void
1777 debug_cgraph (void)
1778 {
1779   dump_cgraph (stderr);
1780 }
1781 
1782 
1783 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables.  */
1784 
1785 void
1786 change_decl_assembler_name (tree decl, tree name)
1787 {
1788   gcc_assert (!assembler_name_hash);
1789   if (!DECL_ASSEMBLER_NAME_SET_P (decl))
1790     {
1791       SET_DECL_ASSEMBLER_NAME (decl, name);
1792       return;
1793     }
1794   if (name == DECL_ASSEMBLER_NAME (decl))
1795     return;
1796 
1797   if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
1798       && DECL_RTL_SET_P (decl))
1799     warning (0, "%D renamed after being referenced in assembly", decl);
1800 
1801   SET_DECL_ASSEMBLER_NAME (decl, name);
1802 }
1803 
1804 /* Add a top-level asm statement to the list.  */
1805 
1806 struct cgraph_asm_node *
1807 cgraph_add_asm_node (tree asm_str)
1808 {
1809   struct cgraph_asm_node *node;
1810 
1811   node = GGC_CNEW (struct cgraph_asm_node);
1812   node->asm_str = asm_str;
1813   node->order = cgraph_order++;
1814   node->next = NULL;
1815   if (cgraph_asm_nodes == NULL)
1816     cgraph_asm_nodes = node;
1817   else
1818     cgraph_asm_last_node->next = node;
1819   cgraph_asm_last_node = node;
1820   return node;
1821 }
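
/* A minimal, standalone sketch (not part of this file) of the pattern used
   by cgraph_add_asm_node above: appending to a singly linked list through a
   tail pointer while stamping each node with a global ordering counter.  All
   names here are invented for the illustration; the block is guarded by
   "#if 0" so it is never built.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

struct sketch_node { int order; struct sketch_node *next; };

static struct sketch_node *sketch_first, *sketch_last;
static int sketch_order;

static struct sketch_node *
sketch_append (void)
{
  struct sketch_node *node = (struct sketch_node *) calloc (1, sizeof *node);

  node->order = sketch_order++;     /* records interleaving with functions */
  if (!sketch_first)
    sketch_first = node;            /* first element: set the head */
  else
    sketch_last->next = node;       /* otherwise chain after the tail */
  sketch_last = node;
  return node;
}

int
main (void)
{
  sketch_append ();
  sketch_append ();
  printf ("%d %d\n", sketch_first->order, sketch_last->order);  /* 0 1 */
  return 0;
}
#endif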
1822 
1823 /* Return true when the DECL can possibly be inlined.  */
1824 bool
1825 cgraph_function_possibly_inlined_p (tree decl)
1826 {
1827   if (!cgraph_global_info_ready)
1828     return !DECL_UNINLINABLE (decl);
1829   return DECL_POSSIBLY_INLINED (decl);
1830 }
1831 
1832 /* Create a clone of edge E, attached to node N in the callgraph; the call site of the clone is CALL_STMT.  */
1833 struct cgraph_edge *
1834 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
1835 		   gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
1836 		   int freq_scale, int loop_nest, bool update_original)
1837 {
1838   struct cgraph_edge *new_edge;
1839   gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
1840   gcov_type freq;
1841 
1842   /* We do not want to ignore loop nest after frequency drops to 0.  */
1843   if (!freq_scale)
1844     freq_scale = 1;
1845   freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
1846   if (freq > CGRAPH_FREQ_MAX)
1847     freq = CGRAPH_FREQ_MAX;
1848   new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq,
1849 			    e->loop_nest + loop_nest);
1850 
1851   new_edge->inline_failed = e->inline_failed;
1852   new_edge->indirect_call = e->indirect_call;
1853   new_edge->lto_stmt_uid = stmt_uid;
1854   if (update_original)
1855     {
1856       e->count -= new_edge->count;
1857       if (e->count < 0)
1858 	e->count = 0;
1859     }
1860   cgraph_call_edge_duplication_hooks (e, new_edge);
1861   return new_edge;
1862 }
1863 
1864 /* Create a node representing a clone of N executed COUNT times.  Decrease
1865    the execution counts of the original node accordingly.
1866 
1867    When UPDATE_ORIGINAL is true, the counts are subtracted from the original
1868    function's profile to reflect the fact that part of the execution is
1869    handled by the new node.  */
1870 struct cgraph_node *
1871 cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
1872 		   int loop_nest, bool update_original,
1873 		   VEC(cgraph_edge_p,heap) *redirect_callers)
1874 {
1875   struct cgraph_node *new_node = cgraph_create_node ();
1876   struct cgraph_edge *e;
1877   gcov_type count_scale;
1878   unsigned i;
1879 
1880   new_node->decl = n->decl;
1881   new_node->origin = n->origin;
1882   if (new_node->origin)
1883     {
1884       new_node->next_nested = new_node->origin->nested;
1885       new_node->origin->nested = new_node;
1886     }
1887   new_node->analyzed = n->analyzed;
1888   new_node->local = n->local;
1889   new_node->local.externally_visible = false;
1890   new_node->global = n->global;
1891   new_node->rtl = n->rtl;
1892   new_node->count = count;
1893   new_node->clone = n->clone;
1894   new_node->clone.tree_map = 0;
1895   if (n->count)
1896     {
1897       if (new_node->count > n->count)
1898         count_scale = REG_BR_PROB_BASE;
1899       else
1900         count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
1901     }
1902   else
1903     count_scale = 0;
1904   if (update_original)
1905     {
1906       n->count -= count;
1907       if (n->count < 0)
1908 	n->count = 0;
1909     }
1910 
1911   for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
1912     {
1913       /* Redirect calls to the old version node to point to its new
1914 	 version.  */
1915       cgraph_redirect_edge_callee (e, new_node);
1916     }
1917 
1918 
1919   for (e = n->callees; e; e = e->next_callee)
1920     cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
1921 		       count_scale, freq, loop_nest, update_original);
1922 
1923   new_node->next_sibling_clone = n->clones;
1924   if (n->clones)
1925     n->clones->prev_sibling_clone = new_node;
1926   n->clones = new_node;
1927   new_node->clone_of = n;
1928 
1929   cgraph_call_node_duplication_hooks (n, new_node);
1930   return new_node;
1931 }
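
/* A minimal, standalone sketch (not part of this file) of the profile
   bookkeeping performed by cgraph_clone_node and cgraph_clone_edge above.
   PROB_BASE, FREQ_BASE and FREQ_MAX merely stand in for REG_BR_PROB_BASE,
   CGRAPH_FREQ_BASE and CGRAPH_FREQ_MAX, and all concrete numbers are made
   up for illustration.  Guarded by "#if 0" so it is never built.  */
#if 0
#include <stdio.h>

#define PROB_BASE 10000   /* plays the role of REG_BR_PROB_BASE */
#define FREQ_BASE 1000    /* plays the role of CGRAPH_FREQ_BASE */
#define FREQ_MAX  100000  /* plays the role of CGRAPH_FREQ_MAX */

int
main (void)
{
  long long orig_count = 1000;   /* execution count of the original node N */
  long long clone_count = 250;   /* COUNT requested for the clone */
  long long edge_count = 400;    /* count of one callee edge of N */
  long long edge_freq = 900;     /* frequency of that edge */
  long long freq_scale = 2500;   /* FREQ_SCALE passed to cgraph_clone_edge */

  /* Scale factor, as computed in cgraph_clone_node.  */
  long long count_scale = clone_count > orig_count
			  ? PROB_BASE : clone_count * PROB_BASE / orig_count;

  /* Count and frequency given to the cloned edge, as in cgraph_clone_edge;
     the frequency is clamped to FREQ_MAX.  */
  long long new_edge_count = edge_count * count_scale / PROB_BASE;
  long long new_edge_freq = edge_freq * freq_scale / FREQ_BASE;
  if (new_edge_freq > FREQ_MAX)
    new_edge_freq = FREQ_MAX;

  /* With UPDATE_ORIGINAL, the remainder stays on the original edge.  */
  long long old_edge_count = edge_count - new_edge_count;
  if (old_edge_count < 0)
    old_edge_count = 0;

  printf ("scale %lld: cloned edge %lldx (freq %lld), original keeps %lldx\n",
	  count_scale, new_edge_count, new_edge_freq, old_edge_count);
  return 0;
}
#endif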
1932 
1933 /* Create a new name for a clone or OMP child function.  Returns an identifier.  */
1934 
1935 static GTY(()) unsigned int clone_fn_id_num;
1936 
1937 tree
1938 clone_function_name (tree decl)
1939 {
1940   tree name = DECL_ASSEMBLER_NAME (decl);
1941   size_t len = IDENTIFIER_LENGTH (name);
1942   char *tmp_name, *prefix;
1943 
1944   prefix = XALLOCAVEC (char, len + strlen ("_clone") + 1);
1945   memcpy (prefix, IDENTIFIER_POINTER (name), len);
1946   strcpy (prefix + len, "_clone");
1947 #ifndef NO_DOT_IN_LABEL
1948   prefix[len] = '.';
1949 #elif !defined NO_DOLLAR_IN_LABEL
1950   prefix[len] = '$';
1951 #endif
1952   ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
1953   return get_identifier (tmp_name);
1954 }
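
/* A minimal, standalone sketch (not part of this file) of the string handling
   in clone_function_name above.  It assumes a target where '.' is allowed in
   labels and simply appends ".N" for the private-name counter; the actual
   decoration produced by ASM_FORMAT_PRIVATE_NAME is target dependent.
   Guarded by "#if 0" so it is never built.  */
#if 0
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static unsigned int sketch_clone_id;

static char *
sketch_clone_name (const char *name)
{
  size_t len = strlen (name);
  char *buf = (char *) malloc (len + sizeof ".clone" + 32);

  memcpy (buf, name, len);
  strcpy (buf + len, ".clone");                 /* '.' assumed to be valid */
  sprintf (buf + strlen (buf), ".%u", sketch_clone_id++);
  return buf;                                   /* e.g. "foo.clone.0" */
}

int
main (void)
{
  char *n = sketch_clone_name ("foo");
  printf ("%s\n", n);                           /* prints foo.clone.0 */
  free (n);
  return 0;
}
#endif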
1955 
1956 /* Create a callgraph node clone with a new declaration.  The actual body
1957    will be copied later, at the compilation stage.
1958 
1959    TODO: after merging in ipa-sra, use function call notes instead of the
1960    args_to_skip bitmap interface.
1961    */
1962 struct cgraph_node *
1963 cgraph_create_virtual_clone (struct cgraph_node *old_node,
1964 			     VEC(cgraph_edge_p,heap) *redirect_callers,
1965 			     VEC(ipa_replace_map_p,gc) *tree_map,
1966 			     bitmap args_to_skip)
1967 {
1968   tree old_decl = old_node->decl;
1969   struct cgraph_node *new_node = NULL;
1970   tree new_decl;
1971   struct cgraph_node key, **slot;
1972 
1973   gcc_assert (tree_versionable_function_p (old_decl));
1974 
1975   /* Make a new FUNCTION_DECL tree node.  */
1976   if (!args_to_skip)
1977     new_decl = copy_node (old_decl);
1978   else
1979     new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
1980   DECL_STRUCT_FUNCTION (new_decl) = NULL;
1981 
1982   /* Generate a new name for the new version. */
1983   DECL_NAME (new_decl) = clone_function_name (old_decl);
1984   SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
1985   SET_DECL_RTL (new_decl, NULL);
1986 
1987   new_node = cgraph_clone_node (old_node, old_node->count,
1988 				CGRAPH_FREQ_BASE, 0, false,
1989 				redirect_callers);
1990   new_node->decl = new_decl;
1991   /* Update the properties.
1992      Make the clone visible only within this translation unit.  Make sure
1993      that it is not weak either.
1994      ??? We cannot use COMDAT linkage because there is no
1995      ABI support for this.  */
1996   DECL_EXTERNAL (new_node->decl) = 0;
1997   if (DECL_ONE_ONLY (old_decl))
1998     DECL_SECTION_NAME (new_node->decl) = NULL;
1999   DECL_COMDAT_GROUP (new_node->decl) = 0;
2000   TREE_PUBLIC (new_node->decl) = 0;
2001   DECL_COMDAT (new_node->decl) = 0;
2002   DECL_WEAK (new_node->decl) = 0;
2003   new_node->clone.tree_map = tree_map;
2004   new_node->clone.args_to_skip = args_to_skip;
2005   if (!args_to_skip)
2006     new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
2007   else if (old_node->clone.combined_args_to_skip)
2008     {
2009       int newi = 0, oldi = 0;
2010       tree arg;
2011       bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
2012       struct cgraph_node *orig_node;
2013       for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
2014         ;
2015       for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = TREE_CHAIN (arg), oldi++)
2016 	{
2017 	  if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
2018 	    {
2019 	      bitmap_set_bit (new_args_to_skip, oldi);
2020 	      continue;
2021 	    }
2022 	  if (bitmap_bit_p (args_to_skip, newi))
2023 	    bitmap_set_bit (new_args_to_skip, oldi);
2024 	  newi++;
2025 	}
2026       new_node->clone.combined_args_to_skip = new_args_to_skip;
2027     }
2028   else
2029     new_node->clone.combined_args_to_skip = args_to_skip;
2030   new_node->local.externally_visible = 0;
2031   new_node->local.local = 1;
2032   new_node->lowered = true;
2033   new_node->reachable = true;
2034 
2035   key.decl = new_decl;
2036   slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
2037   gcc_assert (!*slot);
2038   *slot = new_node;
2039   if (assembler_name_hash)
2040     {
2041       void **aslot;
2042       tree name = DECL_ASSEMBLER_NAME (new_decl);
2043 
2044       aslot = htab_find_slot_with_hash (assembler_name_hash, name,
2045 					decl_assembler_name_hash (name),
2046 					INSERT);
2047       gcc_assert (!*aslot);
2048       *aslot = new_node;
2049     }
2050 
2051   return new_node;
2052 }
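
/* A minimal, standalone sketch (not part of this file) of how the loop above
   folds ARGS_TO_SKIP, which is indexed over the already-reduced argument list
   of OLD_NODE, into a bitmap indexed over the root clone's original argument
   list.  Plain bool arrays stand in for bitmaps and the argument counts are
   made up.  Guarded by "#if 0" so it is never built.  */
#if 0
#include <stdio.h>
#include <stdbool.h>

int
main (void)
{
  /* The root function has 5 arguments; OLD_NODE already skips #1 and #3.  */
  bool old_combined[5] = { false, true, false, true, false };
  /* OLD_NODE thus has 3 live arguments; the new clone skips its second.  */
  bool args_to_skip[3] = { false, true, false };
  bool new_combined[5] = { false, false, false, false, false };
  int oldi, newi = 0;

  for (oldi = 0; oldi < 5; oldi++)
    {
      if (old_combined[oldi])
	{
	  new_combined[oldi] = true;    /* already gone in OLD_NODE */
	  continue;
	}
      if (args_to_skip[newi])
	new_combined[oldi] = true;      /* newly skipped argument */
      newi++;
    }

  for (oldi = 0; oldi < 5; oldi++)
    printf ("%d", new_combined[oldi]);
  printf ("\n");                        /* prints 01110 */
  return 0;
}
#endif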
2053 
2054 /* NODE is no longer a nested function; update cgraph accordingly.  */
2055 void
2056 cgraph_unnest_node (struct cgraph_node *node)
2057 {
2058   struct cgraph_node **node2;
2059   gcc_assert (node->origin);
2060   node2 = &node->origin->nested;
2061   while (*node2 != node)
2062     node2 = &(*node2)->next_nested;
2063   *node2 = node->next_nested;
2064   node->origin = NULL;
2065 }
2066 
2067 /* Return function availability.  See cgraph.h for description of individual
2068    return values.  */
2069 enum availability
2070 cgraph_function_body_availability (struct cgraph_node *node)
2071 {
2072   enum availability avail;
2073   gcc_assert (cgraph_function_flags_ready);
2074   if (!node->analyzed)
2075     avail = AVAIL_NOT_AVAILABLE;
2076   else if (node->local.local)
2077     avail = AVAIL_LOCAL;
2078   else if (!node->local.externally_visible)
2079     avail = AVAIL_AVAILABLE;
2080   /* Inline functions are safe to be analyzed even if their symbol can
2081      be overwritten at runtime.  It is not meaningful to enforce any sane
2082      behavior when an inline function is replaced by a different body.  */
2083   else if (DECL_DECLARED_INLINE_P (node->decl))
2084     avail = AVAIL_AVAILABLE;
2085 
2086   /* If the function can be overwritten, return OVERWRITABLE.  Take
2087      care of at least two notable extensions - the COMDAT functions
2088      used to share template instantiations in C++ (this is symmetric
2089      to code in cp_cannot_inline_tree_fn and probably should be shared,
2090      with the inlinability hooks completely eliminated).
2091 
2092      ??? Does the C++ one definition rule allow us to always return
2093      AVAIL_AVAILABLE here?  That would be a good reason to preserve this
2094      bit.  */
2095 
2096   else if (decl_replaceable_p (node->decl) && !DECL_EXTERNAL (node->decl))
2097     avail = AVAIL_OVERWRITABLE;
2098   else avail = AVAIL_AVAILABLE;
2099 
2100   return avail;
2101 }
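
/* A minimal sketch (not part of this file) of the usual consumer pattern for
   the availability lattice returned above: an analysis may rely on a body
   only when it cannot be replaced by a different definition at link or run
   time.  "sketch_analyze_body" is a hypothetical helper, not a real GCC
   function.  Guarded by "#if 0" so it is never built.  */
#if 0
extern void sketch_analyze_body (struct cgraph_node *);  /* hypothetical */

static void
sketch_consider_node (struct cgraph_node *node)
{
  enum availability avail = cgraph_function_body_availability (node);

  /* AVAIL_OVERWRITABLE bodies may be replaced by another definition, so
     any facts derived from them must remain conservative.  */
  if (avail == AVAIL_AVAILABLE || avail == AVAIL_LOCAL)
    sketch_analyze_body (node);
}
#endif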
2102 
2103 /* Add the function FNDECL to the call graph.
2104    Unlike cgraph_finalize_function, this function is intended to be used
2105    by the middle end and allows insertion of new functions at an arbitrary
2106    point of compilation.  The function can be in high, low or SSA-form
2107    GIMPLE.
2108 
2109    The function is assumed to be reachable and to have its address taken
2110    (so no API-breaking optimizations are performed on it).
2111 
2112    The main work done by this function is to enqueue the function for
2113    later processing, avoiding the need for the passes to be re-entrant.  */
2114 
2115 void
2116 cgraph_add_new_function (tree fndecl, bool lowered)
2117 {
2118   struct cgraph_node *node;
2119   switch (cgraph_state)
2120     {
2121       case CGRAPH_STATE_CONSTRUCTION:
2122 	/* Just enqueue the function to be processed at the nearest opportunity.  */
2123 	node = cgraph_node (fndecl);
2124 	node->next_needed = cgraph_new_nodes;
2125 	if (lowered)
2126 	  node->lowered = true;
2127 	cgraph_new_nodes = node;
2128         break;
2129 
2130       case CGRAPH_STATE_IPA:
2131       case CGRAPH_STATE_IPA_SSA:
2132       case CGRAPH_STATE_EXPANSION:
2133 	/* Bring the function into finalized state and enqueue it for later
2134 	   analysis and compilation.  */
2135 	node = cgraph_node (fndecl);
2136 	node->local.local = false;
2137 	node->local.finalized = true;
2138 	node->reachable = node->needed = true;
2139 	if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
2140 	  {
2141 	    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2142 	    current_function_decl = fndecl;
2143 	    gimple_register_cfg_hooks ();
2144 	    tree_lowering_passes (fndecl);
2145 	    bitmap_obstack_initialize (NULL);
2146 	    if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2147 	      execute_pass_list (pass_early_local_passes.pass.sub);
2148 	    bitmap_obstack_release (NULL);
2149 	    pop_cfun ();
2150 	    current_function_decl = NULL;
2151 
2152 	    lowered = true;
2153 	  }
2154 	if (lowered)
2155 	  node->lowered = true;
2156 	node->next_needed = cgraph_new_nodes;
2157 	cgraph_new_nodes = node;
2158         break;
2159 
2160       case CGRAPH_STATE_FINISHED:
2161 	/* At the very end of compilation we have to do all the work up
2162 	   to expansion.  */
2163 	push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2164 	current_function_decl = fndecl;
2165 	gimple_register_cfg_hooks ();
2166 	if (!lowered)
2167           tree_lowering_passes (fndecl);
2168 	bitmap_obstack_initialize (NULL);
2169 	if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2170 	  execute_pass_list (pass_early_local_passes.pass.sub);
2171 	bitmap_obstack_release (NULL);
2172 	tree_rest_of_compilation (fndecl);
2173 	pop_cfun ();
2174 	current_function_decl = NULL;
2175 	break;
2176     }
2177 
2178   /* Set a personality if required and we already passed EH lowering.  */
2179   if (lowered
2180       && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
2181 	  == eh_personality_lang))
2182     DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
2183 }
2184 
2185 /* Return true if NODE can be made local, which is an API change.
2186    Extern inline functions and C++ COMDAT functions can be made local
2187    at the expense of possible code size growth if the function is used
2188    in multiple compilation units.  */
2189 bool
2190 cgraph_node_can_be_local_p (struct cgraph_node *node)
2191 {
2192   return (!node->needed
2193 	  && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2194 	      || !node->local.externally_visible));
2195 }
2196 
2197 /* Make DECL local.  FIXME: We shouldn't need to mess with rtl this early,
2198    but other code such as notice_global_symbol generates rtl.  */
2199 void
2200 cgraph_make_decl_local (tree decl)
2201 {
2202   rtx rtl, symbol;
2203 
2204   if (TREE_CODE (decl) == VAR_DECL)
2205     DECL_COMMON (decl) = 0;
2206   else if (TREE_CODE (decl) == FUNCTION_DECL)
2207     {
2208       DECL_COMDAT (decl) = 0;
2209       DECL_COMDAT_GROUP (decl) = 0;
2210       DECL_WEAK (decl) = 0;
2211       DECL_EXTERNAL (decl) = 0;
2212     }
2213   else
2214     gcc_unreachable ();
2215   TREE_PUBLIC (decl) = 0;
2216   if (!DECL_RTL_SET_P (decl))
2217     return;
2218 
2219   /* Update rtl flags.  */
2220   make_decl_rtl (decl);
2221 
2222   rtl = DECL_RTL (decl);
2223   if (!MEM_P (rtl))
2224     return;
2225 
2226   symbol = XEXP (rtl, 0);
2227   if (GET_CODE (symbol) != SYMBOL_REF)
2228     return;
2229 
2230   SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
2231 }
2232 
2233 /* Make NODE local.  */
2234 void
2235 cgraph_make_node_local (struct cgraph_node *node)
2236 {
2237   gcc_assert (cgraph_node_can_be_local_p (node));
2238   if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2239     {
2240       struct cgraph_node *alias;
2241       cgraph_make_decl_local (node->decl);
2242 
2243       for (alias = node->same_body; alias; alias = alias->next)
2244 	cgraph_make_decl_local (alias->decl);
2245 
2246       node->local.externally_visible = false;
2247       node->local.local = true;
2248       gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2249     }
2250 }
2251 
2252 /* Set TREE_NOTHROW to NOTHROW on NODE's decl and on NODE's same_body
2253    aliases, if any.  */
2254 
2255 void
2256 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2257 {
2258   struct cgraph_node *alias;
2259   TREE_NOTHROW (node->decl) = nothrow;
2260   for (alias = node->same_body; alias; alias = alias->next)
2261     TREE_NOTHROW (alias->decl) = nothrow;
2262 }
2263 
2264 /* Set TREE_READONLY to READONLY on NODE's decl and on NODE's same_body
2265    aliases, if any.  */
2266 
2267 void
2268 cgraph_set_readonly_flag (struct cgraph_node *node, bool readonly)
2269 {
2270   struct cgraph_node *alias;
2271   TREE_READONLY (node->decl) = readonly;
2272   for (alias = node->same_body; alias; alias = alias->next)
2273     TREE_READONLY (alias->decl) = readonly;
2274 }
2275 
2276 /* Set DECL_PURE_P to PURE on NODE's decl and on NODE's same_body
2277    aliases, if any.  */
2278 
2279 void
2280 cgraph_set_pure_flag (struct cgraph_node *node, bool pure)
2281 {
2282   struct cgraph_node *alias;
2283   DECL_PURE_P (node->decl) = pure;
2284   for (alias = node->same_body; alias; alias = alias->next)
2285     DECL_PURE_P (alias->decl) = pure;
2286 }
2287 
2288 /* Set DECL_LOOPING_CONST_OR_PURE_P to LOOPING_CONST_OR_PURE on NODE's
2289    decl and on NODE's same_body aliases, if any.  */
2290 
2291 void
2292 cgraph_set_looping_const_or_pure_flag (struct cgraph_node *node,
2293 				       bool looping_const_or_pure)
2294 {
2295   struct cgraph_node *alias;
2296   DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping_const_or_pure;
2297   for (alias = node->same_body; alias; alias = alias->next)
2298     DECL_LOOPING_CONST_OR_PURE_P (alias->decl) = looping_const_or_pure;
2299 }
2300 
2301 #include "gt-cgraph.h"
2302