1 /* Callgraph handling code.
2 Copyright (C) 2003-2020 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* This file contains basic routines for manipulating the call graph.
22
23 The call-graph is a data structure designed for inter-procedural
24 optimization. It represents a multi-graph where nodes are functions
25 (symbols within symbol table) and edges are call sites. */
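
/* As an illustrative sketch (not part of the implementation), a pass that
   wants to walk the graph typically does something along these lines.
   FOR_EACH_DEFINED_FUNCTION and the callees/next_callee/callee fields used
   below are the interface declared in cgraph.h; the dump text itself is
   just an example:

     cgraph_node *node;
     FOR_EACH_DEFINED_FUNCTION (node)
       for (cgraph_edge *e = node->callees; e; e = e->next_callee)
         fprintf (dump_file, "%s calls %s\n",
                  node->dump_name (), e->callee->dump_name ());  */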
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "backend.h"
31 #include "target.h"
32 #include "rtl.h"
33 #include "tree.h"
34 #include "gimple.h"
35 #include "predict.h"
36 #include "alloc-pool.h"
37 #include "gimple-ssa.h"
38 #include "cgraph.h"
39 #include "lto-streamer.h"
40 #include "fold-const.h"
41 #include "varasm.h"
42 #include "calls.h"
43 #include "print-tree.h"
44 #include "langhooks.h"
45 #include "intl.h"
46 #include "tree-eh.h"
47 #include "gimple-iterator.h"
48 #include "tree-cfg.h"
49 #include "tree-ssa.h"
50 #include "value-prof.h"
51 #include "ipa-utils.h"
52 #include "symbol-summary.h"
53 #include "tree-vrp.h"
54 #include "ipa-prop.h"
55 #include "ipa-fnsummary.h"
56 #include "cfgloop.h"
57 #include "gimple-pretty-print.h"
58 #include "tree-dfa.h"
59 #include "profile.h"
60 #include "context.h"
61 #include "gimplify.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "selftest.h"
65 #include "tree-into-ssa.h"
66 #include "ipa-inline.h"
67
68 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
69 #include "tree-pass.h"
70
71 /* Queue of cgraph nodes scheduled to be lowered. */
72 symtab_node *x_cgraph_nodes_queue;
73 #define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
74
75 /* Symbol table global context. */
76 symbol_table *symtab;
77
78 /* List of hooks triggered on cgraph_edge events. */
79 struct cgraph_edge_hook_list {
80 cgraph_edge_hook hook;
81 void *data;
82 struct cgraph_edge_hook_list *next;
83 };
84
85 /* List of hooks triggered on cgraph_node events. */
86 struct cgraph_node_hook_list {
87 cgraph_node_hook hook;
88 void *data;
89 struct cgraph_node_hook_list *next;
90 };
91
92 /* List of hooks triggered on events involving two cgraph_edges. */
93 struct cgraph_2edge_hook_list {
94 cgraph_2edge_hook hook;
95 void *data;
96 struct cgraph_2edge_hook_list *next;
97 };
98
99 /* List of hooks triggered on events involving two cgraph_nodes. */
100 struct cgraph_2node_hook_list {
101 cgraph_2node_hook hook;
102 void *data;
103 struct cgraph_2node_hook_list *next;
104 };
105
106 /* Hash descriptor for cgraph_function_version_info. */
107
108 struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
109 {
110 static hashval_t hash (cgraph_function_version_info *);
111 static bool equal (cgraph_function_version_info *,
112 cgraph_function_version_info *);
113 };
114
115 /* Map a cgraph_node to cgraph_function_version_info using this htab.
116 The cgraph_function_version_info has a THIS_NODE field that is the
117 corresponding cgraph_node. */
118
119 static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
120
121 /* Hash function for cgraph_fnver_htab. */
122 hashval_t
123 function_version_hasher::hash (cgraph_function_version_info *ptr)
124 {
125 int uid = ptr->this_node->get_uid ();
126 return (hashval_t)(uid);
127 }
128
129 /* eq function for cgraph_fnver_htab. */
130 bool
131 function_version_hasher::equal (cgraph_function_version_info *n1,
132 cgraph_function_version_info *n2)
133 {
134 return n1->this_node->get_uid () == n2->this_node->get_uid ();
135 }
136
137 /* Mark as GC root all allocated nodes. */
138 static GTY(()) struct cgraph_function_version_info *
139 version_info_node = NULL;
140
141 /* Return true if NODE's address can be compared. */
142
143 bool
144 symtab_node::address_can_be_compared_p ()
145 {
146 /* Address of virtual tables and functions is never compared. */
147 if (DECL_VIRTUAL_P (decl))
148 return false;
149 /* Address of C++ cdtors is never compared. */
150 if (is_a <cgraph_node *> (this)
151 && (DECL_CXX_CONSTRUCTOR_P (decl)
152 || DECL_CXX_DESTRUCTOR_P (decl)))
153 return false;
154 /* Addresses of constant pool symbols are never compared.
155 flag_merge_constants permits us to assume the same on readonly vars. */
156 if (is_a <varpool_node *> (this)
157 && (DECL_IN_CONSTANT_POOL (decl)
158 || (flag_merge_constants >= 2
159 && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
160 return false;
161 return true;
162 }
163
164 /* Get the cgraph_function_version_info node corresponding to node. */
165 cgraph_function_version_info *
166 cgraph_node::function_version (void)
167 {
168 cgraph_function_version_info key;
169 key.this_node = this;
170
171 if (cgraph_fnver_htab == NULL)
172 return NULL;
173
174 return cgraph_fnver_htab->find (&key);
175 }
176
177 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
178 corresponding to cgraph_node NODE. */
179 cgraph_function_version_info *
180 cgraph_node::insert_new_function_version (void)
181 {
182 version_info_node = NULL;
183 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
184 version_info_node->this_node = this;
185
186 if (cgraph_fnver_htab == NULL)
187 cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
188
189 *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
190 = version_info_node;
191 return version_info_node;
192 }
193
194 /* Remove the cgraph_function_version_info node given by DECL_V. */
195 static void
196 delete_function_version (cgraph_function_version_info *decl_v)
197 {
198 if (decl_v == NULL)
199 return;
200
201 if (version_info_node == decl_v)
202 version_info_node = NULL;
203
204 if (decl_v->prev != NULL)
205 decl_v->prev->next = decl_v->next;
206
207 if (decl_v->next != NULL)
208 decl_v->next->prev = decl_v->prev;
209
210 if (cgraph_fnver_htab != NULL)
211 cgraph_fnver_htab->remove_elt (decl_v);
212 }
213
214 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
215 DECL is a duplicate declaration. */
216 void
217 cgraph_node::delete_function_version_by_decl (tree decl)
218 {
219 cgraph_node *decl_node = cgraph_node::get (decl);
220
221 if (decl_node == NULL)
222 return;
223
224 delete_function_version (decl_node->function_version ());
225
226 decl_node->remove ();
227 }
228
229 /* Record that DECL1 and DECL2 are semantically identical function
230 versions. */
231 void
232 cgraph_node::record_function_versions (tree decl1, tree decl2)
233 {
234 cgraph_node *decl1_node = cgraph_node::get_create (decl1);
235 cgraph_node *decl2_node = cgraph_node::get_create (decl2);
236 cgraph_function_version_info *decl1_v = NULL;
237 cgraph_function_version_info *decl2_v = NULL;
238 cgraph_function_version_info *before;
239 cgraph_function_version_info *after;
240
241 gcc_assert (decl1_node != NULL && decl2_node != NULL);
242 decl1_v = decl1_node->function_version ();
243 decl2_v = decl2_node->function_version ();
244
245 if (decl1_v != NULL && decl2_v != NULL)
246 return;
247
248 if (decl1_v == NULL)
249 decl1_v = decl1_node->insert_new_function_version ();
250
251 if (decl2_v == NULL)
252 decl2_v = decl2_node->insert_new_function_version ();
253
254 /* Chain decl2_v and decl1_v. All semantically identical versions
255 will be chained together. */
256
257 before = decl1_v;
258 after = decl2_v;
259
260 while (before->next != NULL)
261 before = before->next;
262
263 while (after->prev != NULL)
264 after= after->prev;
265
266 before->next = after;
267 after->prev = before;
268 }
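
/* A minimal usage sketch: a front end that has decided that two function
   declarations are versions of the same function (e.g. via target
   attributes) simply calls

     cgraph_node::record_function_versions (old_decl, new_decl);

   after which walking the next/prev chain of either decl's
   function_version () visits every recorded version.  The decl names here
   are illustrative only.  */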
269
270 /* Initialize callgraph dump file. */
271
272 void
273 symbol_table::initialize (void)
274 {
275 if (!dump_file)
276 dump_file = dump_begin (TDI_cgraph, NULL);
277
278 if (!ipa_clones_dump_file)
279 ipa_clones_dump_file = dump_begin (TDI_clones, NULL);
280 }
281
282 /* Allocate new callgraph node and insert it into basic data structures. */
283
284 cgraph_node *
285 symbol_table::create_empty (void)
286 {
287 cgraph_count++;
288 return new (ggc_alloc<cgraph_node> ()) cgraph_node (cgraph_max_uid++);
289 }
290
291 /* Register HOOK to be called with DATA on each removed edge. */
292 cgraph_edge_hook_list *
293 symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
294 {
295 cgraph_edge_hook_list *entry;
296 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
297
298 entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
299 entry->hook = hook;
300 entry->data = data;
301 entry->next = NULL;
302 while (*ptr)
303 ptr = &(*ptr)->next;
304 *ptr = entry;
305 return entry;
306 }
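
/* A sketch of typical use by an IPA pass (the hook and holder names are
   hypothetical; only the registration interface comes from this file):

     static void
     my_edge_removal_hook (cgraph_edge *e, void *data)
     {
       ... forget any per-edge information the pass keeps for E ...
     }

     edge_removal_hook_holder
       = symtab->add_edge_removal_hook (my_edge_removal_hook, NULL);

   followed by symtab->remove_edge_removal_hook (edge_removal_hook_holder)
   when the pass is finished.  The node, duplication and insertion hooks
   below follow the same pattern.  */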
307
308 /* Remove ENTRY from the list of hooks called on removing edges. */
309 void
310 symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
311 {
312 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
313
314 while (*ptr != entry)
315 ptr = &(*ptr)->next;
316 *ptr = entry->next;
317 free (entry);
318 }
319
320 /* Call all edge removal hooks. */
321 void
322 symbol_table::call_edge_removal_hooks (cgraph_edge *e)
323 {
324 cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
325 while (entry)
326 {
327 entry->hook (e, entry->data);
328 entry = entry->next;
329 }
330 }
331
332 /* Register HOOK to be called with DATA on each removed node. */
333 cgraph_node_hook_list *
334 symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
335 {
336 cgraph_node_hook_list *entry;
337 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
338
339 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
340 entry->hook = hook;
341 entry->data = data;
342 entry->next = NULL;
343 while (*ptr)
344 ptr = &(*ptr)->next;
345 *ptr = entry;
346 return entry;
347 }
348
349 /* Remove ENTRY from the list of hooks called on removing nodes. */
350 void
351 symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
352 {
353 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
354
355 while (*ptr != entry)
356 ptr = &(*ptr)->next;
357 *ptr = entry->next;
358 free (entry);
359 }
360
361 /* Call all node removal hooks. */
362 void
363 symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
364 {
365 cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
366 while (entry)
367 {
368 entry->hook (node, entry->data);
369 entry = entry->next;
370 }
371 }
372
373 /* Call all node insertion hooks. */
374 void
375 symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
376 {
377 cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
378 while (entry)
379 {
380 entry->hook (node, entry->data);
381 entry = entry->next;
382 }
383 }
384
385
386 /* Register HOOK to be called with DATA on each inserted node. */
387 cgraph_node_hook_list *
388 symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
389 {
390 cgraph_node_hook_list *entry;
391 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
392
393 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
394 entry->hook = hook;
395 entry->data = data;
396 entry->next = NULL;
397 while (*ptr)
398 ptr = &(*ptr)->next;
399 *ptr = entry;
400 return entry;
401 }
402
403 /* Remove ENTRY from the list of hooks called on inserted nodes. */
404 void
405 symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
406 {
407 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
408
409 while (*ptr != entry)
410 ptr = &(*ptr)->next;
411 *ptr = entry->next;
412 free (entry);
413 }
414
415 /* Register HOOK to be called with DATA on each duplicated edge. */
416 cgraph_2edge_hook_list *
417 symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
418 {
419 cgraph_2edge_hook_list *entry;
420 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
421
422 entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
423 entry->hook = hook;
424 entry->data = data;
425 entry->next = NULL;
426 while (*ptr)
427 ptr = &(*ptr)->next;
428 *ptr = entry;
429 return entry;
430 }
431
432 /* Remove ENTRY from the list of hooks called on duplicating edges. */
433 void
434 symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
435 {
436 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
437
438 while (*ptr != entry)
439 ptr = &(*ptr)->next;
440 *ptr = entry->next;
441 free (entry);
442 }
443
444 /* Call all edge duplication hooks. */
445 void
446 symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
447 {
448 cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
449 while (entry)
450 {
451 entry->hook (cs1, cs2, entry->data);
452 entry = entry->next;
453 }
454 }
455
456 /* Register HOOK to be called with DATA on each duplicated node. */
457 cgraph_2node_hook_list *
458 symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
459 {
460 cgraph_2node_hook_list *entry;
461 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
462
463 entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
464 entry->hook = hook;
465 entry->data = data;
466 entry->next = NULL;
467 while (*ptr)
468 ptr = &(*ptr)->next;
469 *ptr = entry;
470 return entry;
471 }
472
473 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
474 void
475 symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
476 {
477 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
478
479 while (*ptr != entry)
480 ptr = &(*ptr)->next;
481 *ptr = entry->next;
482 free (entry);
483 }
484
485 /* Call all node duplication hooks. */
486 void
487 symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
488 cgraph_node *node2)
489 {
490 cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
491 while (entry)
492 {
493 entry->hook (node, node2, entry->data);
494 entry = entry->next;
495 }
496 }
497
498 /* Return cgraph node assigned to DECL. Create new one when needed. */
499
500 cgraph_node *
501 cgraph_node::create (tree decl)
502 {
503 cgraph_node *node = symtab->create_empty ();
504 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
505
506 node->decl = decl;
507
508 if ((flag_openacc || flag_openmp)
509 && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
510 {
511 node->offloadable = 1;
512 if (ENABLE_OFFLOADING)
513 g->have_offload = true;
514 }
515
516 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
517 node->ifunc_resolver = true;
518
519 node->register_symbol ();
520
521 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
522 {
523 node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
524 node->next_nested = node->origin->nested;
525 node->origin->nested = node;
526 }
527 return node;
528 }
529
530 /* Try to find a call graph node for declaration DECL and if it does not exist
531 or if it corresponds to an inline clone, create a new one. */
532
533 cgraph_node *
534 cgraph_node::get_create (tree decl)
535 {
536 cgraph_node *first_clone = cgraph_node::get (decl);
537
538 if (first_clone && !first_clone->inlined_to)
539 return first_clone;
540
541 cgraph_node *node = cgraph_node::create (decl);
542 if (first_clone)
543 {
544 first_clone->clone_of = node;
545 node->clones = first_clone;
546 node->order = first_clone->order;
547 symtab->symtab_prevail_in_asm_name_hash (node);
548 node->decl->decl_with_vis.symtab_node = node;
549 if (dump_file)
550 fprintf (dump_file, "Introduced new external node "
551 "(%s) and turned into root of the clone tree.\n",
552 node->dump_name ());
553 }
554 else if (dump_file)
555 fprintf (dump_file, "Introduced new external node "
556 "(%s).\n", node->dump_name ());
557 return node;
558 }
559
560 /* Mark ALIAS as an alias to TARGET. TARGET is either a function
561 declaration or an assembler name (IDENTIFIER_NODE); the alias is not
562 necessarily associated with a function body. */
563
564 cgraph_node *
565 cgraph_node::create_alias (tree alias, tree target)
566 {
567 cgraph_node *alias_node;
568
569 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
570 || TREE_CODE (target) == IDENTIFIER_NODE);
571 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
572 alias_node = cgraph_node::get_create (alias);
573 gcc_assert (!alias_node->definition);
574 alias_node->alias_target = target;
575 alias_node->definition = true;
576 alias_node->alias = true;
577 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
578 alias_node->transparent_alias = alias_node->weakref = true;
579 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (alias)))
580 alias_node->ifunc_resolver = true;
581 return alias_node;
582 }
583
584 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
585 and NULL otherwise.
586 Same body aliases are output whenever the body of DECL is output,
587 and cgraph_node::get (ALIAS) transparently returns
588 cgraph_node::get (DECL). */
589
590 cgraph_node *
591 cgraph_node::create_same_body_alias (tree alias, tree decl)
592 {
593 cgraph_node *n;
594
595 /* If aliases aren't supported by the assembler, fail. */
596 if (!TARGET_SUPPORTS_ALIASES)
597 return NULL;
598
599 /* Langhooks can create same body aliases of symbols not defined.
600 Those are useless. Drop them on the floor. */
601 if (symtab->global_info_ready)
602 return NULL;
603
604 n = cgraph_node::create_alias (alias, decl);
605 n->cpp_implicit_alias = true;
606 if (symtab->cpp_implicit_aliases_done)
607 n->resolve_alias (cgraph_node::get (decl));
608 return n;
609 }
610
611 /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
612 aliases DECL with adjustments made to the first parameter.
613 See comments in struct cgraph_thunk_info for details on the parameters. */
614
615 cgraph_node *
616 cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
617 HOST_WIDE_INT fixed_offset,
618 HOST_WIDE_INT virtual_value,
619 HOST_WIDE_INT indirect_offset,
620 tree virtual_offset,
621 tree real_alias)
622 {
623 cgraph_node *node;
624
625 node = cgraph_node::get (alias);
626 if (node)
627 node->reset ();
628 else
629 node = cgraph_node::create (alias);
630
631 /* Make sure that VIRTUAL_OFFSET is in sync with VIRTUAL_VALUE. */
632 gcc_checking_assert (virtual_offset
633 ? virtual_value == wi::to_wide (virtual_offset)
634 : virtual_value == 0);
635
636 node->thunk.fixed_offset = fixed_offset;
637 node->thunk.virtual_value = virtual_value;
638 node->thunk.indirect_offset = indirect_offset;
639 node->thunk.alias = real_alias;
640 node->thunk.this_adjusting = this_adjusting;
641 node->thunk.virtual_offset_p = virtual_offset != NULL;
642 node->thunk.thunk_p = true;
643 node->definition = true;
644
645 return node;
646 }
647
648 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
649 Return NULL if there's no such node. */
650
651 cgraph_node *
652 cgraph_node::get_for_asmname (tree asmname)
653 {
654 /* We do not want to look at inline clones. */
655 for (symtab_node *node = symtab_node::get_for_asmname (asmname);
656 node;
657 node = node->next_sharing_asm_name)
658 {
659 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
660 if (cn && !cn->inlined_to)
661 return cn;
662 }
663 return NULL;
664 }
665
666 /* Returns a hash value for X (which really is a cgraph_edge). */
667
668 hashval_t
669 cgraph_edge_hasher::hash (cgraph_edge *e)
670 {
671 /* This is a really poor hash function, but it is what htab_hash_pointer
672 uses. */
673 return (hashval_t) ((intptr_t)e->call_stmt >> 3);
674 }
675
676 /* Returns a hash value for the gimple call statement CALL_STMT. */
677
678 hashval_t
679 cgraph_edge_hasher::hash (gimple *call_stmt)
680 {
681 /* This is a really poor hash function, but it is what htab_hash_pointer
682 uses. */
683 return (hashval_t) ((intptr_t)call_stmt >> 3);
684 }
685
686 /* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
687
688 inline bool
689 cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
690 {
691 return x->call_stmt == y;
692 }
693
694 /* Update call graph edge E in the call site hash of its caller. */
695
696 static inline void
697 cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
698 {
699 gimple *call = e->call_stmt;
700 *e->caller->call_site_hash->find_slot_with_hash
701 (call, cgraph_edge_hasher::hash (call), INSERT) = e;
702 }
703
704 /* Add call graph edge E to call site hash of its caller. */
705
706 static inline void
707 cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
708 {
709 /* There are two speculative edges for every statement (one direct,
710 one indirect); always hash the direct one. */
711 if (e->speculative && e->indirect_unknown_callee)
712 return;
713 cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
714 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
715 if (*slot)
716 {
717 gcc_assert (((cgraph_edge *)*slot)->speculative);
718 if (e->callee && (!e->prev_callee
719 || !e->prev_callee->speculative
720 || e->prev_callee->call_stmt != e->call_stmt))
721 *slot = e;
722 return;
723 }
724 gcc_assert (!*slot || e->speculative);
725 *slot = e;
726 }
727
728 /* Return the callgraph edge representing the GIMPLE_CALL statement
729 CALL_STMT. */
730
731 cgraph_edge *
732 cgraph_node::get_edge (gimple *call_stmt)
733 {
734 cgraph_edge *e, *e2;
735 int n = 0;
736
737 if (call_site_hash)
738 return call_site_hash->find_with_hash
739 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
740
741 /* This loop may turn out to be a performance problem. In that case,
742 adding hashtables to call nodes with very many edges is probably the
743 best solution. It is not a good idea to add a pointer to the CALL_EXPR
744 itself because we want to allow multiple cgraph nodes to represent
745 different clones of the same body before the body is actually cloned. */
746 for (e = callees; e; e = e->next_callee)
747 {
748 if (e->call_stmt == call_stmt)
749 break;
750 n++;
751 }
752
753 if (!e)
754 for (e = indirect_calls; e; e = e->next_callee)
755 {
756 if (e->call_stmt == call_stmt)
757 break;
758 n++;
759 }
760
761 if (n > 100)
762 {
763 call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
764 for (e2 = callees; e2; e2 = e2->next_callee)
765 cgraph_add_edge_to_call_site_hash (e2);
766 for (e2 = indirect_calls; e2; e2 = e2->next_callee)
767 cgraph_add_edge_to_call_site_hash (e2);
768 }
769
770 return e;
771 }
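
/* For instance, code that has a gimple call statement STMT in function FN
   and wants the corresponding edge typically does (a sketch using only
   interfaces declared in cgraph.h):

     cgraph_node *node = cgraph_node::get (fn->decl);
     cgraph_edge *e = node->get_edge (stmt);

   E is NULL when no edge has been created for STMT yet.  */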
772
773
774 /* Change field call_stmt of edge E to NEW_STMT. If UPDATE_SPECULATIVE and E
775 is any component of a speculative edge, then update all components.
776 Speculations can be resolved in the process and EDGE can be removed and
777 deallocated. Return the edge that now represents the call. */
778
779 cgraph_edge *
780 cgraph_edge::set_call_stmt (cgraph_edge *e, gcall *new_stmt,
781 bool update_speculative)
782 {
783 tree decl;
784
785 cgraph_node *new_direct_callee = NULL;
786 if ((e->indirect_unknown_callee || e->speculative)
787 && (decl = gimple_call_fndecl (new_stmt)))
788 {
789 /* Constant propagation and especially inlining can turn an indirect call
790 into a direct one. */
791 new_direct_callee = cgraph_node::get (decl);
792 gcc_checking_assert (new_direct_callee);
793 }
794
795 /* Speculative edges have three components; update all of them
796 when asked to. */
797 if (update_speculative && e->speculative
798 /* If we are about to resolve the speculation by calling make_direct
799 below, do not bother going over all the speculative edges now. */
800 && !new_direct_callee)
801 {
802 cgraph_edge *direct, *indirect, *next;
803 ipa_ref *ref;
804 bool e_indirect = e->indirect_unknown_callee;
805 int n = 0;
806
807 direct = e->first_speculative_call_target ();
808 indirect = e->speculative_call_indirect_edge ();
809
810 gcall *old_stmt = direct->call_stmt;
811 for (cgraph_edge *d = direct; d; d = next)
812 {
813 next = d->next_speculative_call_target ();
814 cgraph_edge *d2 = set_call_stmt (d, new_stmt, false);
815 gcc_assert (d2 == d);
816 n++;
817 }
818 gcc_checking_assert (indirect->num_speculative_call_targets_p () == n);
819 for (unsigned int i = 0; e->caller->iterate_reference (i, ref); i++)
820 if (ref->speculative && ref->stmt == old_stmt)
821 {
822 ref->stmt = new_stmt;
823 n--;
824 }
825
826 indirect = set_call_stmt (indirect, new_stmt, false);
827 return e_indirect ? indirect : direct;
828 }
829
830 if (new_direct_callee)
831 e = make_direct (e, new_direct_callee);
832
833 /* Only direct speculative edges go to call_site_hash. */
834 if (e->caller->call_site_hash
835 && (!e->speculative || !e->indirect_unknown_callee)
836 /* It is possible that the edge was previously speculative. In that case
837 the call stmt hash holds a different value which needs preserving. */
838 && e->caller->get_edge (e->call_stmt) == e)
839 e->caller->call_site_hash->remove_elt_with_hash
840 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt));
841
842 e->call_stmt = new_stmt;
843
844 function *fun = DECL_STRUCT_FUNCTION (e->caller->decl);
845 e->can_throw_external = stmt_can_throw_external (fun, new_stmt);
846 /* Update the call site hash. For speculative calls we only record the first
847 direct edge. */
848 if (e->caller->call_site_hash
849 && (!e->speculative
850 || (e->callee
851 && (!e->prev_callee || !e->prev_callee->speculative
852 || e->prev_callee->call_stmt != e->call_stmt))
853 || (e->speculative && !e->callee)))
854 cgraph_add_edge_to_call_site_hash (e);
855 return e;
856 }
857
858 /* Allocate a cgraph_edge structure and fill it with data according to the
859 parameters of which only CALLEE can be NULL (when creating an indirect call
860 edge). CLONING_P should be set if properties that are copied from an
861 original edge should not be calculated. */
862
863 cgraph_edge *
864 symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
865 gcall *call_stmt, profile_count count,
866 bool indir_unknown_callee, bool cloning_p)
867 {
868 cgraph_edge *edge;
869
870 /* LTO does not actually have access to the call_stmt since these
871 have not been loaded yet. */
872 if (call_stmt)
873 {
874 /* This is a rather expensive check possibly triggering
875 construction of call stmt hashtable. */
876 cgraph_edge *e;
877 gcc_checking_assert (!(e = caller->get_edge (call_stmt))
878 || e->speculative);
879
880 gcc_assert (is_gimple_call (call_stmt));
881 }
882
883 edge = ggc_alloc<cgraph_edge> ();
884 edge->m_summary_id = -1;
885 edges_count++;
886
887 gcc_assert (++edges_max_uid != 0);
888 edge->m_uid = edges_max_uid;
889 edge->aux = NULL;
890 edge->caller = caller;
891 edge->callee = callee;
892 edge->prev_caller = NULL;
893 edge->next_caller = NULL;
894 edge->prev_callee = NULL;
895 edge->next_callee = NULL;
896 edge->lto_stmt_uid = 0;
897 edge->speculative_id = 0;
898
899 edge->count = count;
900 edge->call_stmt = call_stmt;
901 edge->indirect_info = NULL;
902 edge->indirect_inlining_edge = 0;
903 edge->speculative = false;
904 edge->indirect_unknown_callee = indir_unknown_callee;
905 if (call_stmt && caller->call_site_hash)
906 cgraph_add_edge_to_call_site_hash (edge);
907
908 if (cloning_p)
909 return edge;
910
911 edge->can_throw_external
912 = call_stmt ? stmt_can_throw_external (DECL_STRUCT_FUNCTION (caller->decl),
913 call_stmt) : false;
914 edge->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
915 edge->call_stmt_cannot_inline_p = false;
916
917 if (opt_for_fn (edge->caller->decl, flag_devirtualize)
918 && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
919 edge->in_polymorphic_cdtor
920 = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
921 caller->decl);
922 else
923 edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
924
925 if (callee && symtab->state != LTO_STREAMING
926 && edge->callee->comdat_local_p ())
927 edge->caller->calls_comdat_local = true;
928
929 return edge;
930 }
931
932 /* Create edge from a given function to CALLEE in the cgraph. CLONING_P should
933 be set if properties that are copied from an original edge should not be
934 calculated. */
935
936 cgraph_edge *
937 cgraph_node::create_edge (cgraph_node *callee,
938 gcall *call_stmt, profile_count count, bool cloning_p)
939 {
940 cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
941 false, cloning_p);
942
943 if (!cloning_p)
944 initialize_inline_failed (edge);
945
946 edge->next_caller = callee->callers;
947 if (callee->callers)
948 callee->callers->prev_caller = edge;
949 edge->next_callee = callees;
950 if (callees)
951 callees->prev_callee = edge;
952 callees = edge;
953 callee->callers = edge;
954
955 return edge;
956 }
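
/* A minimal sketch of creating a direct call edge when a new call statement
   CALL_STMT calling FNDECL has been added to basic block BB of CALLER (this
   mirrors what cgraph_update_edges_for_call_stmt_node below does for newly
   appearing direct calls; the variable names are illustrative):

     caller->create_edge (cgraph_node::get_create (fndecl),
                          call_stmt, bb->count);  */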
957
958 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
959
960 cgraph_indirect_call_info *
961 cgraph_allocate_init_indirect_info (void)
962 {
963 cgraph_indirect_call_info *ii;
964
965 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
966 ii->param_index = -1;
967 return ii;
968 }
969
970 /* Create an indirect edge with a yet-undetermined callee where the call
971 statement destination is a formal parameter of the caller with index
972 PARAM_INDEX. CLONING_P should be set if properties that are copied from an
973 original edge should not be calculated and indirect_info structure should
974 not be calculated. */
975
976 cgraph_edge *
977 cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
978 profile_count count,
979 bool cloning_p)
980 {
981 cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt, count, true,
982 cloning_p);
983 tree target;
984
985 if (!cloning_p)
986 initialize_inline_failed (edge);
987
988 edge->indirect_info = cgraph_allocate_init_indirect_info ();
989 edge->indirect_info->ecf_flags = ecf_flags;
990 edge->indirect_info->vptr_changed = true;
991
992 /* Record polymorphic call info. */
993 if (!cloning_p
994 && call_stmt
995 && (target = gimple_call_fn (call_stmt))
996 && virtual_method_call_p (target))
997 {
998 ipa_polymorphic_call_context context (decl, target, call_stmt);
999
1000 /* Only record types can have virtual calls. */
1001 edge->indirect_info->polymorphic = true;
1002 edge->indirect_info->param_index = -1;
1003 edge->indirect_info->otr_token
1004 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
1005 edge->indirect_info->otr_type = obj_type_ref_class (target);
1006 gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
1007 edge->indirect_info->context = context;
1008 }
1009
1010 edge->next_callee = indirect_calls;
1011 if (indirect_calls)
1012 indirect_calls->prev_callee = edge;
1013 indirect_calls = edge;
1014
1015 return edge;
1016 }
1017
1018 /* Remove the edge from the list of the callees of the caller. */
1019
1020 void
1021 cgraph_edge::remove_caller (void)
1022 {
1023 if (prev_callee)
1024 prev_callee->next_callee = next_callee;
1025 if (next_callee)
1026 next_callee->prev_callee = prev_callee;
1027 if (!prev_callee)
1028 {
1029 if (indirect_unknown_callee)
1030 caller->indirect_calls = next_callee;
1031 else
1032 caller->callees = next_callee;
1033 }
1034 if (caller->call_site_hash
1035 && this == caller->get_edge (call_stmt))
1036 caller->call_site_hash->remove_elt_with_hash
1037 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
1038 }
1039
1040 /* Put the edge onto the free list. */
1041
1042 void
1043 symbol_table::free_edge (cgraph_edge *e)
1044 {
1045 edges_count--;
1046 if (e->m_summary_id != -1)
1047 edge_released_summary_ids.safe_push (e->m_summary_id);
1048
1049 if (e->indirect_info)
1050 ggc_free (e->indirect_info);
1051 ggc_free (e);
1052 }
1053
1054 /* Remove the edge in the cgraph. */
1055
1056 void
1057 cgraph_edge::remove (cgraph_edge *edge)
1058 {
1059 /* Call all edge removal hooks. */
1060 symtab->call_edge_removal_hooks (edge);
1061
1062 if (!edge->indirect_unknown_callee)
1063 /* Remove from callers list of the callee. */
1064 edge->remove_callee ();
1065
1066 /* Remove from callees list of the callers. */
1067 edge->remove_caller ();
1068
1069 /* Put the edge onto the free list. */
1070 symtab->free_edge (edge);
1071 }
1072
1073 /* Turn the edge into a speculative call to N2. Update
1074 the profile so the direct call is taken DIRECT_COUNT
1075 times.
1076
1077 At clone materialization time, the indirect call E will
1078 be expanded as:
1079
1080 if (call_dest == N2)
1081 n2 ();
1082 else
1083 call call_dest
1084
1085 At this time the function just creates the direct call,
1086 the reference representing the if conditional and attaches
1087 them all to the original indirect call statement.
1088
1089 speculative_id is used to link direct calls with their corresponding
1090 IPA_REF_ADDR references when representing speculative calls.
1091
1092 Return direct edge created. */
1093
1094 cgraph_edge *
1095 cgraph_edge::make_speculative (cgraph_node *n2, profile_count direct_count,
1096 unsigned int speculative_id)
1097 {
1098 cgraph_node *n = caller;
1099 ipa_ref *ref = NULL;
1100 cgraph_edge *e2;
1101
1102 if (dump_file)
1103 fprintf (dump_file, "Indirect call -> speculative call %s => %s\n",
1104 n->dump_name (), n2->dump_name ());
1105 speculative = true;
1106 e2 = n->create_edge (n2, call_stmt, direct_count);
1107 initialize_inline_failed (e2);
1108 e2->speculative = true;
1109 if (TREE_NOTHROW (n2->decl))
1110 e2->can_throw_external = false;
1111 else
1112 e2->can_throw_external = can_throw_external;
1113 e2->lto_stmt_uid = lto_stmt_uid;
1114 e2->speculative_id = speculative_id;
1115 e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1116 indirect_info->num_speculative_call_targets++;
1117 count -= e2->count;
1118 symtab->call_edge_duplication_hooks (this, e2);
1119 ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1120 ref->lto_stmt_uid = lto_stmt_uid;
1121 ref->speculative_id = speculative_id;
1122 ref->speculative = speculative;
1123 n2->mark_address_taken ();
1124 return e2;
1125 }
1126
1127 /* Speculative call consists of an indirect edge and one or more
1128 direct edge+ref pairs.
1129
1130 Given an edge which is part of speculative call, return the first
1131 direct call edge in the speculative call sequence. */
1132
1133 cgraph_edge *
1134 cgraph_edge::first_speculative_call_target ()
1135 {
1136 cgraph_edge *e = this;
1137
1138 gcc_checking_assert (e->speculative);
1139 if (e->callee)
1140 {
1141 while (e->prev_callee && e->prev_callee->speculative
1142 && e->prev_callee->call_stmt == e->call_stmt
1143 && e->prev_callee->lto_stmt_uid == e->lto_stmt_uid)
1144 e = e->prev_callee;
1145 return e;
1146 }
1147 /* Call stmt site hash always points to the first target of the
1148 speculative call sequence. */
1149 if (e->call_stmt)
1150 return e->caller->get_edge (e->call_stmt);
1151 for (cgraph_edge *e2 = e->caller->callees; true; e2 = e2->next_callee)
1152 if (e2->speculative
1153 && e->call_stmt == e2->call_stmt
1154 && e->lto_stmt_uid == e2->lto_stmt_uid)
1155 return e2;
1156 }
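
/* Together with next_speculative_call_target (), this lets callers walk
   every direct target of a speculative call, e.g. (a sketch mirroring
   speculative_call_for_target below):

     for (cgraph_edge *d = e->first_speculative_call_target ();
          d;
          d = d->next_speculative_call_target ())
       ... inspect d->callee ...  */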
1157
1158 /* We always maintain the first direct edge in the call site hash, if one
1159 exists. E is going to be removed. See if it is the first one and update
1160 the hash accordingly. INDIRECT is the indirect edge of the speculative call.
1161 We assume that INDIRECT->num_speculative_call_targets_p () is already
1162 updated for the removal of E. */
1163 static void
1164 update_call_stmt_hash_for_removing_direct_edge (cgraph_edge *e,
1165 cgraph_edge *indirect)
1166 {
1167 if (e->caller->call_site_hash)
1168 {
1169 if (e->caller->get_edge (e->call_stmt) != e)
1170 ;
1171 else if (!indirect->num_speculative_call_targets_p ())
1172 cgraph_update_edge_in_call_site_hash (indirect);
1173 else
1174 {
1175 gcc_checking_assert (e->next_callee && e->next_callee->speculative
1176 && e->next_callee->call_stmt == e->call_stmt);
1177 cgraph_update_edge_in_call_site_hash (e->next_callee);
1178 }
1179 }
1180 }
1181
1182 /* Speculative call EDGE turned out to be direct call to CALLEE_DECL. Remove
1183 the speculative call sequence and return edge representing the call, the
1184 original EDGE can be removed and deallocated. Return the edge that now
1185 represents the call.
1186
1187 For "speculative" indirect call that contains multiple "speculative"
1188 targets (i.e. edge->indirect_info->num_speculative_call_targets > 1),
1189 decrease the count and only remove current direct edge.
1190
1191 If no speculative direct call left to the speculative indirect call, remove
1192 the speculative of both the indirect call and corresponding direct edge.
1193
1194 It is up to caller to iteratively resolve each "speculative" direct call and
1195 redirect the call as appropriate. */
1196
1197 cgraph_edge *
1198 cgraph_edge::resolve_speculation (cgraph_edge *edge, tree callee_decl)
1199 {
1200 cgraph_edge *e2;
1201 ipa_ref *ref;
1202
1203 gcc_assert (edge->speculative && (!callee_decl || edge->callee));
1204 if (!edge->callee)
1205 e2 = edge->first_speculative_call_target ();
1206 else
1207 e2 = edge;
1208 ref = e2->speculative_call_target_ref ();
1209 edge = edge->speculative_call_indirect_edge ();
1210 if (!callee_decl
1211 || !ref->referred->semantically_equivalent_p
1212 (symtab_node::get (callee_decl)))
1213 {
1214 if (dump_file)
1215 {
1216 if (callee_decl)
1217 {
1218 fprintf (dump_file, "Speculative indirect call %s => %s has "
1219 "turned out to have contradicting known target ",
1220 edge->caller->dump_name (),
1221 e2->callee->dump_name ());
1222 print_generic_expr (dump_file, callee_decl);
1223 fprintf (dump_file, "\n");
1224 }
1225 else
1226 {
1227 fprintf (dump_file, "Removing speculative call %s => %s\n",
1228 edge->caller->dump_name (),
1229 e2->callee->dump_name ());
1230 }
1231 }
1232 }
1233 else
1234 {
1235 cgraph_edge *tmp = edge;
1236 if (dump_file)
1237 fprintf (dump_file, "Speculative call turned into direct call.\n");
1238 edge = e2;
1239 e2 = tmp;
1240 /* FIXME: If EDGE is inlined, we should scale up the frequencies
1241 and counts in the functions inlined through it. */
1242 }
1243 edge->count += e2->count;
1244 if (edge->num_speculative_call_targets_p ())
1245 {
1246 /* The indirect edge has multiple speculative targets, don't remove
1247 speculative until all related direct edges are resolved. */
1248 edge->indirect_info->num_speculative_call_targets--;
1249 if (!edge->indirect_info->num_speculative_call_targets)
1250 edge->speculative = false;
1251 }
1252 else
1253 edge->speculative = false;
1254 e2->speculative = false;
1255 update_call_stmt_hash_for_removing_direct_edge (e2, edge);
1256 ref->remove_reference ();
1257 if (e2->indirect_unknown_callee || e2->inline_failed)
1258 remove (e2);
1259 else
1260 e2->callee->remove_symbol_and_inline_clones ();
1261 return edge;
1262 }
1263
1264 /* Return edge corresponding to speculative call to a given target.
1265 NULL if speculative call does not have one. */
1266
1267 cgraph_edge *
1268 cgraph_edge::speculative_call_for_target (cgraph_node *target)
1269 {
1270 for (cgraph_edge *direct = first_speculative_call_target ();
1271 direct;
1272 direct = direct->next_speculative_call_target ())
1273 if (direct->speculative_call_target_ref ()
1274 ->referred->semantically_equivalent_p (target))
1275 return direct;
1276 return NULL;
1277 }
1278
1279 /* Make an indirect or speculative EDGE with an unknown callee an ordinary edge
1280 leading to CALLEE. Speculations can be resolved in the process and EDGE can
1281 be removed and deallocated. Return the edge that now represents the
1282 call. */
1283
1284 cgraph_edge *
1285 cgraph_edge::make_direct (cgraph_edge *edge, cgraph_node *callee)
1286 {
1287 gcc_assert (edge->indirect_unknown_callee || edge->speculative);
1288
1289 /* If we are redirecting speculative call, make it non-speculative. */
1290 if (edge->speculative)
1291 {
1292 cgraph_edge *found = NULL;
1293 cgraph_edge *direct, *next;
1294
1295 edge = edge->speculative_call_indirect_edge ();
1296
1297 /* Look at all speculative targets and remove all but the one corresponding
1298 to callee (if it exists). */
1299 for (direct = edge->first_speculative_call_target ();
1300 direct;
1301 direct = next)
1302 {
1303 next = direct->next_speculative_call_target ();
1304
1305 /* Compare ref not direct->callee. Direct edge is possibly
1306 inlined or redirected. */
1307 if (!direct->speculative_call_target_ref ()
1308 ->referred->semantically_equivalent_p (callee))
1309 edge = direct->resolve_speculation (direct, NULL);
1310 else
1311 {
1312 gcc_checking_assert (!found);
1313 found = direct;
1314 }
1315 }
1316
1317 /* On successful speculation just remove the indirect edge and
1318 return the pre-existing direct edge.
1319 It is important not to remove and redirect it, because the direct
1320 edge may have been inlined or redirected. */
1321 if (found)
1322 {
1323 cgraph_edge *e2 = resolve_speculation (found, callee->decl);
1324 gcc_checking_assert (!found->speculative && e2 == found);
1325 return found;
1326 }
1327 gcc_checking_assert (!edge->speculative);
1328 }
1329
1330 edge->indirect_unknown_callee = 0;
1331 ggc_free (edge->indirect_info);
1332 edge->indirect_info = NULL;
1333
1334 /* Get the edge out of the indirect edge list. */
1335 if (edge->prev_callee)
1336 edge->prev_callee->next_callee = edge->next_callee;
1337 if (edge->next_callee)
1338 edge->next_callee->prev_callee = edge->prev_callee;
1339 if (!edge->prev_callee)
1340 edge->caller->indirect_calls = edge->next_callee;
1341
1342 /* Put it into the normal callee list */
1343 edge->prev_callee = NULL;
1344 edge->next_callee = edge->caller->callees;
1345 if (edge->caller->callees)
1346 edge->caller->callees->prev_callee = edge;
1347 edge->caller->callees = edge;
1348
1349 /* Insert to callers list of the new callee. */
1350 edge->set_callee (callee);
1351
1352 /* We need to re-determine the inlining status of the edge. */
1353 initialize_inline_failed (edge);
1354 return edge;
1355 }
1356
1357 /* Redirect callee of the edge to N. The function does not update underlying
1358 call expression. */
1359
1360 void
1361 cgraph_edge::redirect_callee (cgraph_node *n)
1362 {
1363 bool loc = callee->comdat_local_p ();
1364 /* Remove from callers list of the current callee. */
1365 remove_callee ();
1366
1367 /* Insert to callers list of the new callee. */
1368 set_callee (n);
1369
1370 if (!inline_failed)
1371 return;
1372 if (!loc && n->comdat_local_p ())
1373 {
1374 cgraph_node *to = caller->inlined_to ? caller->inlined_to : caller;
1375 to->calls_comdat_local = true;
1376 }
1377 else if (loc && !n->comdat_local_p ())
1378 {
1379 cgraph_node *to = caller->inlined_to ? caller->inlined_to : caller;
1380 gcc_checking_assert (to->calls_comdat_local);
1381 to->calls_comdat_local = to->check_calls_comdat_local_p ();
1382 }
1383 }
1384
1385 /* If necessary, change the function declaration in the call statement
1386 associated with E so that it corresponds to the edge callee. Speculations
1387 can be resolved in the process and EDGE can be removed and deallocated.
1388
1389 The edge could be one of speculative direct call generated from speculative
1390 indirect call. In this circumstance, decrease the speculative targets
1391 count (i.e. num_speculative_call_targets) and redirect call stmt to the
1392 corresponding i-th target. If no speculative direct call is left to the
1393 speculative indirect call, remove the "speculative" flag of the indirect
1394 call and also redirect the stmt to its final direct target.
1395
1396 It is up to caller to iteratively transform each "speculative"
1397 direct call as appropriate. */
1398
1399 gimple *
1400 cgraph_edge::redirect_call_stmt_to_callee (cgraph_edge *e)
1401 {
1402 tree decl = gimple_call_fndecl (e->call_stmt);
1403 gcall *new_stmt;
1404 gimple_stmt_iterator gsi;
1405
1406 if (e->speculative)
1407 {
1408 /* If there already is a direct call (i.e. as a result of the inliner's
1409 substitution), forget about speculating. */
1410 if (decl)
1411 e = make_direct (e->speculative_call_indirect_edge (),
1412 cgraph_node::get (decl));
1413 else
1414 {
1415 /* Be sure we redirect all speculative targets before poking
1416 at the indirect edge. */
1417 gcc_checking_assert (e->callee);
1418 cgraph_edge *indirect = e->speculative_call_indirect_edge ();
1419 gcall *new_stmt;
1420 ipa_ref *ref;
1421
1422 /* Expand speculation into GIMPLE code. */
1423 if (dump_file)
1424 {
1425 fprintf (dump_file,
1426 "Expanding speculative call of %s -> %s count: ",
1427 e->caller->dump_name (),
1428 e->callee->dump_name ());
1429 e->count.dump (dump_file);
1430 fprintf (dump_file, "\n");
1431 }
1432 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1433
1434 profile_count all = indirect->count;
1435 for (cgraph_edge *e2 = e->first_speculative_call_target ();
1436 e2;
1437 e2 = e2->next_speculative_call_target ())
1438 all = all + e2->count;
1439 profile_probability prob = e->count.probability_in (all);
1440 if (!prob.initialized_p ())
1441 prob = profile_probability::even ();
1442 ref = e->speculative_call_target_ref ();
1443 new_stmt = gimple_ic (e->call_stmt,
1444 dyn_cast<cgraph_node *> (ref->referred),
1445 prob);
1446 e->speculative = false;
1447 if (indirect->num_speculative_call_targets_p ())
1448 {
1449 /* The indirect edge has multiple speculative targets, don't
1450 remove speculative until all related direct edges are
1451 redirected. */
1452 indirect->indirect_info->num_speculative_call_targets--;
1453 if (!indirect->indirect_info->num_speculative_call_targets)
1454 indirect->speculative = false;
1455 }
1456 else
1457 indirect->speculative = false;
1458 /* The direct and indirect edges are never both in the call site hash;
1459 get it updated. */
1460 update_call_stmt_hash_for_removing_direct_edge (e, indirect);
1461 cgraph_edge::set_call_stmt (e, new_stmt, false);
1462 e->count = gimple_bb (e->call_stmt)->count;
1463
1464 /* Once we are done with expanding the sequence, update also indirect
1465 call probability. Until then the basic block accounts for the
1466 sum of indirect edge and all non-expanded speculations. */
1467 if (!indirect->speculative)
1468 indirect->count = gimple_bb (indirect->call_stmt)->count;
1469 ref->speculative = false;
1470 ref->stmt = NULL;
1471 pop_cfun ();
1472 /* Continue redirecting E to proper target. */
1473 }
1474 }
1475
1476
1477 if (e->indirect_unknown_callee
1478 || decl == e->callee->decl)
1479 return e->call_stmt;
1480
1481 if (decl && ipa_saved_clone_sources)
1482 {
1483 tree *p = ipa_saved_clone_sources->get (e->callee);
1484 if (p && decl == *p)
1485 {
1486 gimple_call_set_fndecl (e->call_stmt, e->callee->decl);
1487 return e->call_stmt;
1488 }
1489 }
1490
1491 if (flag_checking && decl)
1492 {
1493 cgraph_node *node = cgraph_node::get (decl);
1494 gcc_assert (!node || !node->clone.param_adjustments);
1495 }
1496
1497 if (symtab->dump_file)
1498 {
1499 fprintf (symtab->dump_file, "updating call of %s -> %s: ",
1500 e->caller->dump_name (), e->callee->dump_name ());
1501 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1502 if (e->callee->clone.param_adjustments)
1503 e->callee->clone.param_adjustments->dump (symtab->dump_file);
1504 unsigned performed_len
1505 = vec_safe_length (e->caller->clone.performed_splits);
1506 if (performed_len > 0)
1507 fprintf (symtab->dump_file, "Performed splits records:\n");
1508 for (unsigned i = 0; i < performed_len; i++)
1509 {
1510 ipa_param_performed_split *sm
1511 = &(*e->caller->clone.performed_splits)[i];
1512 print_node_brief (symtab->dump_file, " dummy_decl: ", sm->dummy_decl,
1513 TDF_UID);
1514 fprintf (symtab->dump_file, ", unit_offset: %u\n", sm->unit_offset);
1515 }
1516 }
1517
1518 if (ipa_param_adjustments *padjs = e->callee->clone.param_adjustments)
1519 {
1520 /* We need to defer cleaning EH info on the new statement to
1521 fixup-cfg. We may not have dominator information at this point
1522 and thus would end up with unreachable blocks and have no way
1523 to communicate that we need to run CFG cleanup then. */
1524 int lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1525 if (lp_nr != 0)
1526 remove_stmt_from_eh_lp (e->call_stmt);
1527
1528 tree old_fntype = gimple_call_fntype (e->call_stmt);
1529 new_stmt = padjs->modify_call (e->call_stmt,
1530 e->caller->clone.performed_splits,
1531 e->callee->decl, false);
1532 cgraph_node *origin = e->callee;
1533 while (origin->clone_of)
1534 origin = origin->clone_of;
1535
1536 if ((origin->former_clone_of
1537 && old_fntype == TREE_TYPE (origin->former_clone_of))
1538 || old_fntype == TREE_TYPE (origin->decl))
1539 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1540 else
1541 {
1542 tree new_fntype = padjs->build_new_function_type (old_fntype, true);
1543 gimple_call_set_fntype (new_stmt, new_fntype);
1544 }
1545
1546 if (lp_nr != 0)
1547 add_stmt_to_eh_lp (new_stmt, lp_nr);
1548 }
1549 else
1550 {
1551 new_stmt = e->call_stmt;
1552 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1553 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1554 }
1555
1556 /* If changing the call to __cxa_pure_virtual or similar noreturn function,
1557 adjust gimple_call_fntype too. */
1558 if (gimple_call_noreturn_p (new_stmt)
1559 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (e->callee->decl)))
1560 && TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))
1561 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl)))
1562 == void_type_node))
1563 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1564
1565 /* If the call becomes noreturn, remove the LHS if possible. */
1566 tree lhs = gimple_call_lhs (new_stmt);
1567 if (lhs
1568 && gimple_call_noreturn_p (new_stmt)
1569 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (new_stmt)))
1570 || should_remove_lhs_p (lhs)))
1571 {
1572 if (TREE_CODE (lhs) == SSA_NAME)
1573 {
1574 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1575 TREE_TYPE (lhs), NULL);
1576 var = get_or_create_ssa_default_def
1577 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1578 gimple *set_stmt = gimple_build_assign (lhs, var);
1579 gsi = gsi_for_stmt (new_stmt);
1580 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1581 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1582 }
1583 gimple_call_set_lhs (new_stmt, NULL_TREE);
1584 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1585 }
1586
1587 /* If new callee has no static chain, remove it. */
1588 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1589 {
1590 gimple_call_set_chain (new_stmt, NULL);
1591 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1592 }
1593
1594 maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl),
1595 new_stmt);
1596
1597 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1598
1599 if (symtab->dump_file)
1600 {
1601 fprintf (symtab->dump_file, " updated to:");
1602 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1603 }
1604 return new_stmt;
1605 }
1606
1607 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1608 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1609 of OLD_STMT if it was previously call statement.
1610 If NEW_STMT is NULL, the call has been dropped without any
1611 replacement. */
1612
1613 static void
1614 cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1615 gimple *old_stmt, tree old_call,
1616 gimple *new_stmt)
1617 {
1618 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1619 ? gimple_call_fndecl (new_stmt) : 0;
1620
1621 /* If we are seeing only indirect calls, there is nothing to update. */
1622 if (!new_call && !old_call)
1623 return;
1624 /* See if we turned an indirect call into a direct call or folded a call
1625 to one builtin into a call to a different builtin. */
1626 if (old_call != new_call)
1627 {
1628 cgraph_edge *e = node->get_edge (old_stmt);
1629 cgraph_edge *ne = NULL;
1630 profile_count count;
1631
1632 if (e)
1633 {
1634 /* Keep calls marked as dead dead. */
1635 if (new_stmt && is_gimple_call (new_stmt) && e->callee
1636 && fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
1637 {
1638 cgraph_edge::set_call_stmt (node->get_edge (old_stmt),
1639 as_a <gcall *> (new_stmt));
1640 return;
1641 }
1642 /* See if the edge is already there and has the correct callee. It
1643 might be so because indirect inlining has already updated
1644 it. We also might've cloned and redirected the edge. */
1645 if (new_call && e->callee)
1646 {
1647 cgraph_node *callee = e->callee;
1648 while (callee)
1649 {
1650 if (callee->decl == new_call
1651 || callee->former_clone_of == new_call)
1652 {
1653 cgraph_edge::set_call_stmt (e, as_a <gcall *> (new_stmt));
1654 return;
1655 }
1656 callee = callee->clone_of;
1657 }
1658 }
1659
1660 /* Otherwise remove the edge and create a new one; we can't simply redirect
1661 since the function has changed, so the inline plan and other information
1662 attached to the edge is invalid. */
1663 count = e->count;
1664 if (e->indirect_unknown_callee || e->inline_failed)
1665 cgraph_edge::remove (e);
1666 else
1667 e->callee->remove_symbol_and_inline_clones ();
1668 }
1669 else if (new_call)
1670 {
1671 /* We are seeing new direct call; compute profile info based on BB. */
1672 basic_block bb = gimple_bb (new_stmt);
1673 count = bb->count;
1674 }
1675
1676 if (new_call)
1677 {
1678 ne = node->create_edge (cgraph_node::get_create (new_call),
1679 as_a <gcall *> (new_stmt), count);
1680 gcc_assert (ne->inline_failed);
1681 }
1682 }
1683 /* We only updated the call stmt; update the pointer in the cgraph edge. */
1684 else if (old_stmt != new_stmt)
1685 cgraph_edge::set_call_stmt (node->get_edge (old_stmt),
1686 as_a <gcall *> (new_stmt));
1687 }
1688
1689 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1690 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1691 of OLD_STMT before it was updated (updating can happen in place). */
1692
1693 void
1694 cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1695 gimple *new_stmt)
1696 {
1697 cgraph_node *orig = cgraph_node::get (cfun->decl);
1698 cgraph_node *node;
1699
1700 gcc_checking_assert (orig);
1701 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1702 if (orig->clones)
1703 for (node = orig->clones; node != orig;)
1704 {
1705 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1706 if (node->clones)
1707 node = node->clones;
1708 else if (node->next_sibling_clone)
1709 node = node->next_sibling_clone;
1710 else
1711 {
1712 while (node != orig && !node->next_sibling_clone)
1713 node = node->clone_of;
1714 if (node != orig)
1715 node = node->next_sibling_clone;
1716 }
1717 }
1718 }
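
/* Illustrative use from a hypothetical GIMPLE pass (a sketch, not code from
   this file): when a call statement is folded in place, the call graph is
   kept in sync roughly like this:

     gimple *old_stmt = gsi_stmt (gsi);
     tree old_decl = is_gimple_call (old_stmt)
		     ? gimple_call_fndecl (old_stmt) : NULL_TREE;
     ... fold or replace the statement, yielding new_stmt ...
     cgraph_update_edges_for_call_stmt (old_stmt, old_decl, new_stmt);  */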
1719
1720
1721 /* Remove all callees from the node. */
1722
1723 void
1724 cgraph_node::remove_callees (void)
1725 {
1726 cgraph_edge *e, *f;
1727
1728 calls_comdat_local = false;
1729
1730 /* It is sufficient to remove the edges from the lists of callers of
1731 the callees. The callee list of the node can be zapped with one
1732 assignment. */
1733 for (e = callees; e; e = f)
1734 {
1735 f = e->next_callee;
1736 symtab->call_edge_removal_hooks (e);
1737 if (!e->indirect_unknown_callee)
1738 e->remove_callee ();
1739 symtab->free_edge (e);
1740 }
1741 for (e = indirect_calls; e; e = f)
1742 {
1743 f = e->next_callee;
1744 symtab->call_edge_removal_hooks (e);
1745 if (!e->indirect_unknown_callee)
1746 e->remove_callee ();
1747 symtab->free_edge (e);
1748 }
1749 indirect_calls = NULL;
1750 callees = NULL;
1751 if (call_site_hash)
1752 {
1753 call_site_hash->empty ();
1754 call_site_hash = NULL;
1755 }
1756 }
1757
1758 /* Remove all callers from the node. */
1759
1760 void
1761 cgraph_node::remove_callers (void)
1762 {
1763 cgraph_edge *e, *f;
1764
1765 /* It is sufficient to remove the edges from the lists of callees of
1766 the callers. The caller list of the node can be zapped with one
1767 assignment. */
1768 for (e = callers; e; e = f)
1769 {
1770 f = e->next_caller;
1771 symtab->call_edge_removal_hooks (e);
1772 e->remove_caller ();
1773 symtab->free_edge (e);
1774 }
1775 callers = NULL;
1776 }
1777
1778 /* Helper function for cgraph_release_function_body and free_lang_data.
1779 It releases body from function DECL without having to inspect its
1780 possibly non-existent symtab node. */
1781
1782 void
1783 release_function_body (tree decl)
1784 {
1785 function *fn = DECL_STRUCT_FUNCTION (decl);
1786 if (fn)
1787 {
1788 if (fn->cfg
1789 && loops_for_fn (fn))
1790 {
1791 fn->curr_properties &= ~PROP_loops;
1792 loop_optimizer_finalize (fn);
1793 }
1794 if (fn->gimple_df)
1795 {
1796 delete_tree_ssa (fn);
1797 fn->eh = NULL;
1798 }
1799 if (fn->cfg)
1800 {
1801 gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
1802 gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
1803 delete_tree_cfg_annotations (fn);
1804 clear_edges (fn);
1805 fn->cfg = NULL;
1806 }
1807 if (fn->value_histograms)
1808 free_histograms (fn);
1809 gimple_set_body (decl, NULL);
1810       /* The struct function hangs a lot of data off it that would leak if we
1811 	 didn't remove all pointers to it.  */
1812 ggc_free (fn);
1813 DECL_STRUCT_FUNCTION (decl) = NULL;
1814 }
1815 DECL_SAVED_TREE (decl) = NULL;
1816 }
1817
1818 /* Release memory used to represent body of function.
1819 Use this only for functions that are released before being translated to
1820 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1821    are freed in final.c via free_after_compilation().
1822 KEEP_ARGUMENTS are useful only if you want to rebuild body as thunk. */
1823
1824 void
1825 cgraph_node::release_body (bool keep_arguments)
1826 {
1827 ipa_transforms_to_apply.release ();
1828 if (!used_as_abstract_origin && symtab->state != PARSING)
1829 {
1830 DECL_RESULT (decl) = NULL;
1831
1832 if (!keep_arguments)
1833 DECL_ARGUMENTS (decl) = NULL;
1834 }
1835 /* If the node is abstract and needed, then do not clear
1836 DECL_INITIAL of its associated function declaration because it's
1837 needed to emit debug info later. */
1838 if (!used_as_abstract_origin && DECL_INITIAL (decl))
1839 DECL_INITIAL (decl) = error_mark_node;
1840 release_function_body (decl);
1841 if (lto_file_data)
1842 {
1843 lto_free_function_in_decl_state_for_node (this);
1844 lto_file_data = NULL;
1845 }
1846 }
1847
1848 /* Remove function from symbol table. */
1849
1850 void
1851 cgraph_node::remove (void)
1852 {
1853 if (symtab->ipa_clones_dump_file && symtab->cloned_nodes.contains (this))
1854 fprintf (symtab->ipa_clones_dump_file,
1855 "Callgraph removal;%s;%d;%s;%d;%d\n", asm_name (), order,
1856 DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl),
1857 DECL_SOURCE_COLUMN (decl));
1858
1859 symtab->call_cgraph_removal_hooks (this);
1860 remove_callers ();
1861 remove_callees ();
1862 ipa_transforms_to_apply.release ();
1863 delete_function_version (function_version ());
1864
1865   /* Incremental inlining accesses removed nodes stored in the postorder
1866      list.  */
1867 force_output = false;
1868 forced_by_abi = false;
1869 cgraph_node *next;
1870 for (cgraph_node *n = nested; n; n = next)
1871 {
1872 next = n->next_nested;
1873 n->origin = NULL;
1874 n->next_nested = NULL;
1875 }
1876 nested = NULL;
1877 if (origin)
1878 {
1879 cgraph_node **node2 = &origin->nested;
1880
1881 while (*node2 != this)
1882 node2 = &(*node2)->next_nested;
1883 *node2 = next_nested;
1884 }
1885 unregister ();
1886 if (prev_sibling_clone)
1887 prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1888 else if (clone_of)
1889 clone_of->clones = next_sibling_clone;
1890 if (next_sibling_clone)
1891 next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1892 if (clones)
1893 {
1894 cgraph_node *n, *next;
1895
1896 if (clone_of)
1897 {
1898 for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1899 n->clone_of = clone_of;
1900 n->clone_of = clone_of;
1901 n->next_sibling_clone = clone_of->clones;
1902 if (clone_of->clones)
1903 clone_of->clones->prev_sibling_clone = n;
1904 clone_of->clones = clones;
1905 }
1906 else
1907 {
1908 /* We are removing node with clones. This makes clones inconsistent,
1909 but assume they will be removed subsequently and just keep clone
1910 tree intact. This can happen in unreachable function removal since
1911 we remove unreachable functions in random order, not by bottom-up
1912 walk of clone trees. */
1913 for (n = clones; n; n = next)
1914 {
1915 next = n->next_sibling_clone;
1916 n->next_sibling_clone = NULL;
1917 n->prev_sibling_clone = NULL;
1918 n->clone_of = NULL;
1919 }
1920 }
1921 }
1922
1923   /* While all the clones are removed after being processed, the function
1924 itself is kept in the cgraph even after it is compiled. Check whether
1925 we are done with this body and reclaim it proactively if this is the case.
1926 */
1927 if (symtab->state != LTO_STREAMING)
1928 {
1929 cgraph_node *n = cgraph_node::get (decl);
1930 if (!n
1931 || (!n->clones && !n->clone_of && !n->inlined_to
1932 && ((symtab->global_info_ready || in_lto_p)
1933 && (TREE_ASM_WRITTEN (n->decl)
1934 || DECL_EXTERNAL (n->decl)
1935 || !n->analyzed
1936 || (!flag_wpa && n->in_other_partition)))))
1937 release_body ();
1938 }
1939 else
1940 {
1941 lto_free_function_in_decl_state_for_node (this);
1942 lto_file_data = NULL;
1943 }
1944
1945 decl = NULL;
1946 if (call_site_hash)
1947 {
1948 call_site_hash->empty ();
1949 call_site_hash = NULL;
1950 }
1951
1952 symtab->release_symbol (this);
1953 }
1954
1955 /* Likewise indicate that a node has its address taken.  */
1956
1957 void
1958 cgraph_node::mark_address_taken (void)
1959 {
1960 /* Indirect inlining can figure out that all uses of the address are
1961 inlined. */
1962 if (inlined_to)
1963 {
1964 gcc_assert (cfun->after_inlining);
1965 gcc_assert (callers->indirect_inlining_edge);
1966 return;
1967 }
1968 /* FIXME: address_taken flag is used both as a shortcut for testing whether
1969 IPA_REF_ADDR reference exists (and thus it should be set on node
1970 representing alias we take address of) and as a test whether address
1971 of the object was taken (and thus it should be set on node alias is
1972      referring to).  We should remove the first use and then remove the
1973      following set.  */
1974 address_taken = 1;
1975 cgraph_node *node = ultimate_alias_target ();
1976 node->address_taken = 1;
1977 }
1978
1979 /* Return local info node for the compiled function. */
1980
1981 cgraph_node *
1982 cgraph_node::local_info_node (tree decl)
1983 {
1984 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1985 cgraph_node *node = get (decl);
1986 if (!node)
1987 return NULL;
1988 return node->ultimate_alias_target ();
1989 }
1990
1991 /* Return RTL info for the compiled function. */
1992
1993 cgraph_rtl_info *
1994 cgraph_node::rtl_info (const_tree decl)
1995 {
1996 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1997 cgraph_node *node = get (decl);
1998 if (!node)
1999 return NULL;
2000 enum availability avail;
2001 node = node->ultimate_alias_target (&avail);
2002 if (decl != current_function_decl
2003 && (avail < AVAIL_AVAILABLE
2004 || (node->decl != current_function_decl
2005 && !TREE_ASM_WRITTEN (node->decl))))
2006 return NULL;
2007 /* Allocate if it doesn't exist. */
2008 if (node->rtl == NULL)
2009 {
2010 node->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
2011 SET_HARD_REG_SET (node->rtl->function_used_regs);
2012 }
2013 return node->rtl;
2014 }
2015
2016 /* Return a string describing the failure REASON. */
2017
2018 const char*
2019 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
2020 {
2021 #undef DEFCIFCODE
2022 #define DEFCIFCODE(code, type, string) string,
2023
2024 static const char *cif_string_table[CIF_N_REASONS] = {
2025 #include "cif-code.def"
2026 };
2027
2028 /* Signedness of an enum type is implementation defined, so cast it
2029 to unsigned before testing. */
2030 gcc_assert ((unsigned) reason < CIF_N_REASONS);
2031 return cif_string_table[reason];
2032 }
2033
2034 /* Return a type describing the failure REASON. */
2035
2036 cgraph_inline_failed_type_t
2037 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
2038 {
2039 #undef DEFCIFCODE
2040 #define DEFCIFCODE(code, type, string) type,
2041
2042 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
2043 #include "cif-code.def"
2044 };
2045
2046 /* Signedness of an enum type is implementation defined, so cast it
2047 to unsigned before testing. */
2048 gcc_assert ((unsigned) reason < CIF_N_REASONS);
2049 return cif_type_table[reason];
2050 }
2051
2052 /* Names used to print out the availability enum. */
2053 const char * const cgraph_availability_names[] =
2054 {"unset", "not_available", "overwritable", "available", "local"};
2055
2056 /* Output flags of edge to a file F. */
2057
2058 void
2059 cgraph_edge::dump_edge_flags (FILE *f)
2060 {
2061 if (speculative)
2062 fprintf (f, "(speculative) ");
2063 if (!inline_failed)
2064 fprintf (f, "(inlined) ");
2065 if (call_stmt_cannot_inline_p)
2066 fprintf (f, "(call_stmt_cannot_inline_p) ");
2067 if (indirect_inlining_edge)
2068 fprintf (f, "(indirect_inlining) ");
2069 if (count.initialized_p ())
2070 {
2071 fprintf (f, "(");
2072 count.dump (f);
2073 fprintf (f, ",");
2074 fprintf (f, "%.2f per call) ", sreal_frequency ().to_double ());
2075 }
2076 if (can_throw_external)
2077 fprintf (f, "(can throw external) ");
2078 }
2079
2080 /* Dump call graph node to file F. */
2081
2082 void
2083 cgraph_node::dump (FILE *f)
2084 {
2085 cgraph_edge *edge;
2086
2087 dump_base (f);
2088
2089 if (inlined_to)
2090 fprintf (f, " Function %s is inline copy in %s\n",
2091 dump_name (),
2092 inlined_to->dump_name ());
2093 if (clone_of)
2094 fprintf (f, " Clone of %s\n", clone_of->dump_asm_name ());
2095 if (symtab->function_flags_ready)
2096 fprintf (f, " Availability: %s\n",
2097 cgraph_availability_names [get_availability ()]);
2098
2099 if (profile_id)
2100 fprintf (f, " Profile id: %i\n",
2101 profile_id);
2102 if (unit_id)
2103 fprintf (f, " Unit id: %i\n",
2104 unit_id);
2105 cgraph_function_version_info *vi = function_version ();
2106 if (vi != NULL)
2107 {
2108 fprintf (f, " Version info: ");
2109 if (vi->prev != NULL)
2110 {
2111 fprintf (f, "prev: ");
2112 fprintf (f, "%s ", vi->prev->this_node->dump_asm_name ());
2113 }
2114 if (vi->next != NULL)
2115 {
2116 fprintf (f, "next: ");
2117 fprintf (f, "%s ", vi->next->this_node->dump_asm_name ());
2118 }
2119 if (vi->dispatcher_resolver != NULL_TREE)
2120 fprintf (f, "dispatcher: %s",
2121 lang_hooks.decl_printable_name (vi->dispatcher_resolver, 2));
2122
2123 fprintf (f, "\n");
2124 }
2125 fprintf (f, " Function flags:");
2126 if (count.initialized_p ())
2127 {
2128 fprintf (f, " count:");
2129 count.dump (f);
2130 }
2131 if (tp_first_run > 0)
2132 fprintf (f, " first_run:%" PRId64, (int64_t) tp_first_run);
2133 if (origin)
2134 fprintf (f, " nested in:%s", origin->dump_asm_name ());
2135 if (gimple_has_body_p (decl))
2136 fprintf (f, " body");
2137 if (process)
2138 fprintf (f, " process");
2139 if (local)
2140 fprintf (f, " local");
2141 if (redefined_extern_inline)
2142 fprintf (f, " redefined_extern_inline");
2143 if (only_called_at_startup)
2144 fprintf (f, " only_called_at_startup");
2145 if (only_called_at_exit)
2146 fprintf (f, " only_called_at_exit");
2147 if (tm_clone)
2148 fprintf (f, " tm_clone");
2149 if (calls_comdat_local)
2150 fprintf (f, " calls_comdat_local");
2151 if (icf_merged)
2152 fprintf (f, " icf_merged");
2153 if (merged_comdat)
2154 fprintf (f, " merged_comdat");
2155 if (merged_extern_inline)
2156 fprintf (f, " merged_extern_inline");
2157 if (split_part)
2158 fprintf (f, " split_part");
2159 if (indirect_call_target)
2160 fprintf (f, " indirect_call_target");
2161 if (nonfreeing_fn)
2162 fprintf (f, " nonfreeing_fn");
2163 if (DECL_STATIC_CONSTRUCTOR (decl))
2164 fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2165 if (DECL_STATIC_DESTRUCTOR (decl))
2166 fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2167 if (frequency == NODE_FREQUENCY_HOT)
2168 fprintf (f, " hot");
2169 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2170 fprintf (f, " unlikely_executed");
2171 if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2172 fprintf (f, " executed_once");
2173 if (opt_for_fn (decl, optimize_size))
2174 fprintf (f, " optimize_size");
2175 if (parallelized_function)
2176 fprintf (f, " parallelized_function");
2177 if (DECL_IS_OPERATOR_NEW_P (decl))
2178 fprintf (f, " %soperator_new",
2179 DECL_IS_REPLACEABLE_OPERATOR (decl) ? "replaceable_" : "");
2180 if (DECL_IS_OPERATOR_DELETE_P (decl))
2181 fprintf (f, " %soperator_delete",
2182 DECL_IS_REPLACEABLE_OPERATOR (decl) ? "replaceable_" : "");
2183
2184 fprintf (f, "\n");
2185
2186 if (thunk.thunk_p)
2187 {
2188 fprintf (f, " Thunk");
2189 if (thunk.alias)
2190 fprintf (f, " of %s (asm:%s)",
2191 lang_hooks.decl_printable_name (thunk.alias, 2),
2192 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2193 fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
2194 "has virtual offset %i\n",
2195 (int)thunk.fixed_offset,
2196 (int)thunk.virtual_value,
2197 (int)thunk.indirect_offset,
2198 (int)thunk.virtual_offset_p);
2199 }
2200 else if (former_thunk_p ())
2201 fprintf (f, " Former thunk fixed offset %i virtual value %i "
2202 "indirect_offset %i has virtual offset %i\n",
2203 (int)thunk.fixed_offset,
2204 (int)thunk.virtual_value,
2205 (int)thunk.indirect_offset,
2206 (int)thunk.virtual_offset_p);
2207 if (alias && thunk.alias
2208 && DECL_P (thunk.alias))
2209 {
2210 fprintf (f, " Alias of %s",
2211 lang_hooks.decl_printable_name (thunk.alias, 2));
2212 if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
2213 fprintf (f, " (asm:%s)",
2214 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2215 fprintf (f, "\n");
2216 }
2217
2218 fprintf (f, " Called by: ");
2219
2220 profile_count sum = profile_count::zero ();
2221 for (edge = callers; edge; edge = edge->next_caller)
2222 {
2223 fprintf (f, "%s ", edge->caller->dump_asm_name ());
2224 edge->dump_edge_flags (f);
2225 if (edge->count.initialized_p ())
2226 sum += edge->count.ipa ();
2227 }
2228
2229 fprintf (f, "\n Calls: ");
2230 for (edge = callees; edge; edge = edge->next_callee)
2231 {
2232 fprintf (f, "%s ", edge->callee->dump_asm_name ());
2233 edge->dump_edge_flags (f);
2234 }
2235 fprintf (f, "\n");
2236
2237 if (count.ipa ().initialized_p ())
2238 {
2239 bool ok = true;
2240 bool min = false;
2241 ipa_ref *ref;
2242
2243 FOR_EACH_ALIAS (this, ref)
2244 if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
2245 sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa ();
2246
2247 if (inlined_to
2248 || (symtab->state < EXPANSION
2249 && ultimate_alias_target () == this && only_called_directly_p ()))
2250 ok = !count.ipa ().differs_from_p (sum);
2251 else if (count.ipa () > profile_count::from_gcov_type (100)
2252 && count.ipa () < sum.apply_scale (99, 100))
2253 ok = false, min = true;
2254 if (!ok)
2255 {
2256 fprintf (f, " Invalid sum of caller counts ");
2257 sum.dump (f);
2258 if (min)
2259 fprintf (f, ", should be at most ");
2260 else
2261 fprintf (f, ", should be ");
2262 count.ipa ().dump (f);
2263 fprintf (f, "\n");
2264 }
2265 }
2266
2267 for (edge = indirect_calls; edge; edge = edge->next_callee)
2268 {
2269 if (edge->indirect_info->polymorphic)
2270 {
2271 fprintf (f, " Polymorphic indirect call of type ");
2272 print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2273 fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2274 }
2275 else
2276 fprintf (f, " Indirect call");
2277 edge->dump_edge_flags (f);
2278 if (edge->indirect_info->param_index != -1)
2279 {
2280 fprintf (f, " of param:%i", edge->indirect_info->param_index);
2281 if (edge->indirect_info->agg_contents)
2282 fprintf (f, " loaded from %s %s at offset %i",
2283 edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2284 edge->indirect_info->by_ref ? "passed by reference":"",
2285 (int)edge->indirect_info->offset);
2286 if (edge->indirect_info->vptr_changed)
2287 fprintf (f, " (vptr maybe changed)");
2288 }
2289 fprintf (f, " Num speculative call targets: %i",
2290 edge->indirect_info->num_speculative_call_targets);
2291 fprintf (f, "\n");
2292 if (edge->indirect_info->polymorphic)
2293 edge->indirect_info->context.dump (f);
2294 }
2295 }
2296
2297 /* Dump call graph node to file F in graphviz format. */
2298
2299 void
2300 cgraph_node::dump_graphviz (FILE *f)
2301 {
2302 cgraph_edge *edge;
2303
2304 for (edge = callees; edge; edge = edge->next_callee)
2305 {
2306 cgraph_node *callee = edge->callee;
2307
2308 fprintf (f, "\t\"%s\" -> \"%s\"\n", dump_name (), callee->dump_name ());
2309 }
2310 }
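
/* Note that only the edges are printed; the symbol-table level dumper is
   expected to provide the enclosing "digraph { ... }" wrapper.  An edge from
   a node foo/12 to a node bar/13 (made-up names) would come out as:

	"foo/12" -> "bar/13"
*/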
2311
2312
2313 /* Dump the call graph node to stderr.  */
2314
2315 DEBUG_FUNCTION void
2316 cgraph_node::debug (void)
2317 {
2318 dump (stderr);
2319 }
2320
2321 /* Dump the callgraph to file F. */
2322
2323 void
2324 cgraph_node::dump_cgraph (FILE *f)
2325 {
2326 cgraph_node *node;
2327
2328 fprintf (f, "callgraph:\n\n");
2329 FOR_EACH_FUNCTION (node)
2330 node->dump (f);
2331 }
2332
2333 /* Return true when the DECL can possibly be inlined. */
2334
2335 bool
2336 cgraph_function_possibly_inlined_p (tree decl)
2337 {
2338 if (!symtab->global_info_ready)
2339 return !DECL_UNINLINABLE (decl);
2340 return DECL_POSSIBLY_INLINED (decl);
2341 }
2342
2343 /* The cgraph_node is no longer a nested function; update the cgraph accordingly.  */
2344 void
2345 cgraph_node::unnest (void)
2346 {
2347   gcc_assert (origin);
2348   cgraph_node **node2 = &origin->nested;
2349
2350 while (*node2 != this)
2351 node2 = &(*node2)->next_nested;
2352 *node2 = next_nested;
2353 origin = NULL;
2354 }
2355
2356 /* Return function availability. See cgraph.h for description of individual
2357 return values. */
2358 enum availability
2359 cgraph_node::get_availability (symtab_node *ref)
2360 {
2361 if (ref)
2362 {
2363 cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2364 if (cref)
2365 ref = cref->inlined_to;
2366 }
2367 enum availability avail;
2368 if (!analyzed)
2369 avail = AVAIL_NOT_AVAILABLE;
2370 else if (local)
2371 avail = AVAIL_LOCAL;
2372 else if (inlined_to)
2373 avail = AVAIL_AVAILABLE;
2374 else if (transparent_alias)
2375 ultimate_alias_target (&avail, ref);
2376 else if (ifunc_resolver
2377 || lookup_attribute ("noipa", DECL_ATTRIBUTES (decl)))
2378 avail = AVAIL_INTERPOSABLE;
2379 else if (!externally_visible)
2380 avail = AVAIL_AVAILABLE;
2381 /* If this is a reference from symbol itself and there are no aliases, we
2382 may be sure that the symbol was not interposed by something else because
2383 the symbol itself would be unreachable otherwise.
2384
2385 Also comdat groups are always resolved in groups. */
2386 else if ((this == ref && !has_aliases_p ())
2387 || (ref && get_comdat_group ()
2388 && get_comdat_group () == ref->get_comdat_group ()))
2389 avail = AVAIL_AVAILABLE;
2390 /* Inline functions are safe to be analyzed even if their symbol can
2391      be overwritten at runtime.  It is not meaningful to enforce any sane
2392      behavior when an inline function is replaced by a different body.  */
2393 else if (DECL_DECLARED_INLINE_P (decl))
2394 avail = AVAIL_AVAILABLE;
2395
2396 /* If the function can be overwritten, return OVERWRITABLE. Take
2397 care at least of two notable extensions - the COMDAT functions
2398 used to share template instantiations in C++ (this is symmetric
2399      to code in cp_cannot_inline_tree_fn, which should probably be shared,
2400      with the inlinability hooks eliminated completely).  */
2401
2402 else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
2403 avail = AVAIL_INTERPOSABLE;
2404 else avail = AVAIL_AVAILABLE;
2405
2406 return avail;
2407 }
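
/* Illustrative use (hypothetical IPA analysis code, a sketch rather than
   code from this file): only rely on a callee's body when its definition
   cannot be interposed at link time:

     enum availability avail;
     cgraph_node *target = edge->callee->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       analyze_body (target);

   Here analyze_body is a made-up helper standing in for whatever the pass
   does with a body it can trust.  */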
2408
2409 /* Worker for cgraph_node_can_be_local_p. */
2410 static bool
2411 cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2412 {
2413 return !(!node->force_output
2414 && !node->ifunc_resolver
2415 /* Limitation of gas requires us to output targets of symver aliases
2416 as global symbols. This is binutils PR 25295. */
2417 && !node->symver
2418 && ((DECL_COMDAT (node->decl)
2419 && !node->forced_by_abi
2420 && !node->used_from_object_file_p ()
2421 && !node->same_comdat_group)
2422 || !node->externally_visible));
2423 }
2424
2425 /* Return true if cgraph_node can be made local for API change.
2426 Extern inline functions and C++ COMDAT functions can be made local
2427 at the expense of possible code size growth if function is used in multiple
2428 compilation units. */
2429 bool
2430 cgraph_node::can_be_local_p (void)
2431 {
2432 return (!address_taken
2433 && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2434 NULL, true));
2435 }
2436
2437 /* Call callback on cgraph_node, thunks and aliases associated to cgraph_node.
2438 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
2439 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2440 skipped. */
2441 bool
2442 cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2443 (cgraph_node *, void *),
2444 void *data,
2445 bool include_overwritable,
2446 bool exclude_virtual_thunks)
2447 {
2448 cgraph_edge *e;
2449 ipa_ref *ref;
2450 enum availability avail = AVAIL_AVAILABLE;
2451
2452 if (include_overwritable
2453 || (avail = get_availability ()) > AVAIL_INTERPOSABLE)
2454 {
2455 if (callback (this, data))
2456 return true;
2457 }
2458 FOR_EACH_ALIAS (this, ref)
2459 {
2460 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2461 if (include_overwritable
2462 || alias->get_availability () > AVAIL_INTERPOSABLE)
2463 if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2464 include_overwritable,
2465 exclude_virtual_thunks))
2466 return true;
2467 }
2468 if (avail <= AVAIL_INTERPOSABLE)
2469 return false;
2470 for (e = callers; e; e = e->next_caller)
2471 if (e->caller->thunk.thunk_p
2472 && (include_overwritable
2473 || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2474 && !(exclude_virtual_thunks
2475 && e->caller->thunk.virtual_offset_p))
2476 if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2477 include_overwritable,
2478 exclude_virtual_thunks))
2479 return true;
2480
2481 return false;
2482 }
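
/* Illustrative use (hypothetical, a sketch rather than code from this file):
   the walker above is normally paired with a small worker that returns true
   to stop the walk early, e.g.

     static bool
     address_taken_p_1 (cgraph_node *node, void *)
     {
       return node->address_taken;
     }

     ...
     if (node->call_for_symbol_thunks_and_aliases (address_taken_p_1,
						   NULL, true))
       ... some alias or thunk of NODE has its address taken ...  */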
2483
2484 /* Worker to bring NODE local. */
2485
2486 bool
2487 cgraph_node::make_local (cgraph_node *node, void *)
2488 {
2489 gcc_checking_assert (node->can_be_local_p ());
2490 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2491 {
2492 node->make_decl_local ();
2493 node->set_section (NULL);
2494 node->set_comdat_group (NULL);
2495 node->externally_visible = false;
2496 node->forced_by_abi = false;
2497 node->local = true;
2499 node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2500 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2501 && !flag_incremental_link);
2502 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2503 gcc_assert (node->get_availability () == AVAIL_LOCAL);
2504 }
2505 return false;
2506 }
2507
2508 /* Bring cgraph node local. */
2509
2510 void
2511 cgraph_node::make_local (void)
2512 {
2513 call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
2514 }
2515
2516 /* Worker to set nothrow flag. */
2517
2518 static void
2519 set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
2520 bool *changed)
2521 {
2522 cgraph_edge *e;
2523
2524 if (nothrow && !TREE_NOTHROW (node->decl))
2525 {
2526       /* With non-call exceptions we can't say for sure that another function's
2527 	 body was not optimized in a way that it can still throw.  */
2528 if (!non_call || node->binds_to_current_def_p ())
2529 {
2530 TREE_NOTHROW (node->decl) = true;
2531 *changed = true;
2532 for (e = node->callers; e; e = e->next_caller)
2533 e->can_throw_external = false;
2534 }
2535 }
2536 else if (!nothrow && TREE_NOTHROW (node->decl))
2537 {
2538 TREE_NOTHROW (node->decl) = false;
2539 *changed = true;
2540 }
2541 ipa_ref *ref;
2542 FOR_EACH_ALIAS (node, ref)
2543 {
2544 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2545 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2546 set_nothrow_flag_1 (alias, nothrow, non_call, changed);
2547 }
2548 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2549 if (e->caller->thunk.thunk_p
2550 && (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2551 set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
2552 }
2553
2554 /* Set TREE_NOTHROW on the node's decl and on the decls of its aliases,
2555    if any, to NOTHROW.  Return true if any change was done.  */
2556
2557 bool
2558 cgraph_node::set_nothrow_flag (bool nothrow)
2559 {
2560 bool changed = false;
2561 bool non_call = opt_for_fn (decl, flag_non_call_exceptions);
2562
2563 if (!nothrow || get_availability () > AVAIL_INTERPOSABLE)
2564 set_nothrow_flag_1 (this, nothrow, non_call, &changed);
2565 else
2566 {
2567 ipa_ref *ref;
2568
2569 FOR_EACH_ALIAS (this, ref)
2570 {
2571 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2572 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2573 set_nothrow_flag_1 (alias, nothrow, non_call, &changed);
2574 }
2575 }
2576 return changed;
2577 }
2578
2579 /* Worker to set malloc flag. */
2580 static void
2581 set_malloc_flag_1 (cgraph_node *node, bool malloc_p, bool *changed)
2582 {
2583 if (malloc_p && !DECL_IS_MALLOC (node->decl))
2584 {
2585 DECL_IS_MALLOC (node->decl) = true;
2586 *changed = true;
2587 }
2588
2589 ipa_ref *ref;
2590 FOR_EACH_ALIAS (node, ref)
2591 {
2592 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2593 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2594 set_malloc_flag_1 (alias, malloc_p, changed);
2595 }
2596
2597 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2598 if (e->caller->thunk.thunk_p
2599 && (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2600 set_malloc_flag_1 (e->caller, malloc_p, changed);
2601 }
2602
2603 /* Set DECL_IS_MALLOC on NODE's decl and on NODE's aliases if any. */
2604
2605 bool
2606 cgraph_node::set_malloc_flag (bool malloc_p)
2607 {
2608 bool changed = false;
2609
2610 if (!malloc_p || get_availability () > AVAIL_INTERPOSABLE)
2611 set_malloc_flag_1 (this, malloc_p, &changed);
2612 else
2613 {
2614 ipa_ref *ref;
2615
2616 FOR_EACH_ALIAS (this, ref)
2617 {
2618 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2619 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2620 set_malloc_flag_1 (alias, malloc_p, &changed);
2621 }
2622 }
2623 return changed;
2624 }
2625
2626 /* Worker for set_const_flag.  */
2627
2628 static void
2629 set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2630 bool *changed)
2631 {
2632 /* Static constructors and destructors without a side effect can be
2633 optimized out. */
2634 if (set_const && !looping)
2635 {
2636 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2637 {
2638 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2639 *changed = true;
2640 }
2641 if (DECL_STATIC_DESTRUCTOR (node->decl))
2642 {
2643 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2644 *changed = true;
2645 }
2646 }
2647 if (!set_const)
2648 {
2649 if (TREE_READONLY (node->decl))
2650 {
2651 TREE_READONLY (node->decl) = 0;
2652 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2653 *changed = true;
2654 }
2655 }
2656 else
2657 {
2658 /* Consider function:
2659
2660 bool a(int *p)
2661 {
2662 return *p==*p;
2663 }
2664
2665 During early optimization we will turn this into:
2666
2667 bool a(int *p)
2668 {
2669 return true;
2670 }
2671
2672 	 Now this function will be detected as CONST; however, when interposed
2673 	 it may end up being just PURE.  We must always assume the worst
2674 	 scenario here.  */
2675 if (TREE_READONLY (node->decl))
2676 {
2677 if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2678 {
2679 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2680 *changed = true;
2681 }
2682 }
2683 else if (node->binds_to_current_def_p ())
2684 {
2685 TREE_READONLY (node->decl) = true;
2686 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2687 DECL_PURE_P (node->decl) = false;
2688 *changed = true;
2689 }
2690 else
2691 {
2692 if (dump_file && (dump_flags & TDF_DETAILS))
2693 fprintf (dump_file, "Dropping state to PURE because function does "
2694 "not bind to current def.\n");
2695 if (!DECL_PURE_P (node->decl))
2696 {
2697 DECL_PURE_P (node->decl) = true;
2698 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2699 *changed = true;
2700 }
2701 else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2702 {
2703 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2704 *changed = true;
2705 }
2706 }
2707 }
2708
2709 ipa_ref *ref;
2710 FOR_EACH_ALIAS (node, ref)
2711 {
2712 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2713 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2714 set_const_flag_1 (alias, set_const, looping, changed);
2715 }
2716 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2717 if (e->caller->thunk.thunk_p
2718 && (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2719 {
2720 /* Virtual thunks access virtual offset in the vtable, so they can
2721 only be pure, never const. */
2722 if (set_const
2723 && (e->caller->thunk.virtual_offset_p
2724 || !node->binds_to_current_def_p (e->caller)))
2725 *changed |= e->caller->set_pure_flag (true, looping);
2726 else
2727 set_const_flag_1 (e->caller, set_const, looping, changed);
2728 }
2729 }
2730
2731 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
2732    If SET_CONST is false, clear the flag.
2733
2734    When setting the flag, be careful about possible interposition: do not
2735    set the flag for functions that can be interposed, and set only the pure
2736    flag for functions that may bind to a different definition.
2737
2738 Return true if any change was done. */
2739
2740 bool
2741 cgraph_node::set_const_flag (bool set_const, bool looping)
2742 {
2743 bool changed = false;
2744 if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2745 set_const_flag_1 (this, set_const, looping, &changed);
2746 else
2747 {
2748 ipa_ref *ref;
2749
2750 FOR_EACH_ALIAS (this, ref)
2751 {
2752 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2753 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2754 set_const_flag_1 (alias, set_const, looping, &changed);
2755 }
2756 }
2757 return changed;
2758 }
2759
2760 /* Info used by set_pure_flag_1. */
2761
2762 struct set_pure_flag_info
2763 {
2764 bool pure;
2765 bool looping;
2766 bool changed;
2767 };
2768
2769 /* Worker for set_pure_flag.  */
2770
2771 static bool
2772 set_pure_flag_1 (cgraph_node *node, void *data)
2773 {
2774 struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2775 /* Static constructors and destructors without a side effect can be
2776 optimized out. */
2777 if (info->pure && !info->looping)
2778 {
2779 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2780 {
2781 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2782 info->changed = true;
2783 }
2784 if (DECL_STATIC_DESTRUCTOR (node->decl))
2785 {
2786 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2787 info->changed = true;
2788 }
2789 }
2790 if (info->pure)
2791 {
2792 if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
2793 {
2794 DECL_PURE_P (node->decl) = true;
2795 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
2796 info->changed = true;
2797 }
2798 else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
2799 && !info->looping)
2800 {
2801 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2802 info->changed = true;
2803 }
2804 }
2805 else
2806 {
2807 if (DECL_PURE_P (node->decl))
2808 {
2809 DECL_PURE_P (node->decl) = false;
2810 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2811 info->changed = true;
2812 }
2813 }
2814 return false;
2815 }
2816
2817 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
2818 if any to PURE.
2819
2820 When setting the flag, be careful about possible interposition.
2821 Return true if any change was done. */
2822
2823 bool
2824 cgraph_node::set_pure_flag (bool pure, bool looping)
2825 {
2826 struct set_pure_flag_info info = {pure, looping, false};
2827 call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2828 return info.changed;
2829 }
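
/* Illustrative use (hypothetical fixed-point propagation code, a sketch
   rather than code from this file): the set_*_flag helpers above report
   whether anything changed, which lets a propagator iterate to a fixed
   point.  FOUND_CONST, FOUND_PURE, FOUND_NOTHROW and LOOPING are made-up
   local results of the analysis:

     bool changed = false;
     if (found_const)
       changed |= node->set_const_flag (true, looping);
     else if (found_pure)
       changed |= node->set_pure_flag (true, looping);
     if (found_nothrow)
       changed |= node->set_nothrow_flag (true);
     if (changed)
       ... re-queue callers for another propagation round ...  */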
2830
2831 /* Return true when cgraph_node cannot return or throw and thus
2832 it is safe to ignore its side effects for IPA analysis. */
2833
2834 bool
2835 cgraph_node::cannot_return_p (void)
2836 {
2837 int flags = flags_from_decl_or_type (decl);
2838 if (!opt_for_fn (decl, flag_exceptions))
2839 return (flags & ECF_NORETURN) != 0;
2840 else
2841 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2842 == (ECF_NORETURN | ECF_NOTHROW));
2843 }
2844
2845 /* Return true when call of edge cannot lead to return from caller
2846 and thus it is safe to ignore its side effects for IPA analysis
2847 when computing side effects of the caller.
2848 FIXME: We could actually mark all edges that have no reaching
2849    path to the exit block or throw to get better results.  */
2850 bool
2851 cgraph_edge::cannot_lead_to_return_p (void)
2852 {
2853 if (caller->cannot_return_p ())
2854 return true;
2855 if (indirect_unknown_callee)
2856 {
2857 int flags = indirect_info->ecf_flags;
2858 if (!opt_for_fn (caller->decl, flag_exceptions))
2859 return (flags & ECF_NORETURN) != 0;
2860 else
2861 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2862 == (ECF_NORETURN | ECF_NOTHROW));
2863 }
2864 else
2865 return callee->cannot_return_p ();
2866 }
2867
2868 /* Return true if the edge may be considered hot. */
2869
2870 bool
2871 cgraph_edge::maybe_hot_p (void)
2872 {
2873 if (!maybe_hot_count_p (NULL, count.ipa ()))
2874 return false;
2875 if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2876 || (callee
2877 && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2878 return false;
2879 if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2880 && (callee
2881 && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2882 return false;
2883 if (opt_for_fn (caller->decl, optimize_size))
2884 return false;
2885 if (caller->frequency == NODE_FREQUENCY_HOT)
2886 return true;
2887 if (!count.initialized_p ())
2888 return true;
2889 cgraph_node *where = caller->inlined_to ? caller->inlined_to : caller;
2890 if (!where->count.initialized_p ())
2891 return false;
2892 if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2893 {
2894 if (count.apply_scale (2, 1) < where->count.apply_scale (3, 1))
2895 return false;
2896 }
2897 else if (count.apply_scale (param_hot_bb_frequency_fraction , 1)
2898 < where->count)
2899 return false;
2900 return true;
2901 }
2902
2903 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2904
2905 static bool
2906 nonremovable_p (cgraph_node *node, void *)
2907 {
2908 return !node->can_remove_if_no_direct_calls_and_refs_p ();
2909 }
2910
2911 /* Return true if whole comdat group can be removed if there are no direct
2912 calls to THIS. */
2913
2914 bool
2915 cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2916 {
2917 struct ipa_ref *ref;
2918
2919 /* For local symbols or non-comdat group it is the same as
2920 can_remove_if_no_direct_calls_p. */
2921 if (!externally_visible || !same_comdat_group)
2922 {
2923 if (DECL_EXTERNAL (decl))
2924 return true;
2925 if (address_taken)
2926 return false;
2927 return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
2928 }
2929
2930 if (will_inline && address_taken)
2931 return false;
2932
2933 /* Otherwise check if we can remove the symbol itself and then verify
2934      that the only uses of the comdat group are direct calls to THIS
2935 or its aliases. */
2936 if (!can_remove_if_no_direct_calls_and_refs_p ())
2937 return false;
2938
2939 /* Check that all refs come from within the comdat group. */
2940 for (int i = 0; iterate_referring (i, ref); i++)
2941 if (ref->referring->get_comdat_group () != get_comdat_group ())
2942 return false;
2943
2944 struct cgraph_node *target = ultimate_alias_target ();
2945 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2946 next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2947 {
2948 if (!externally_visible)
2949 continue;
2950 if (!next->alias
2951 && !next->can_remove_if_no_direct_calls_and_refs_p ())
2952 return false;
2953
2954 /* If we see different symbol than THIS, be sure to check calls. */
2955 if (next->ultimate_alias_target () != target)
2956 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2957 if (e->caller->get_comdat_group () != get_comdat_group ()
2958 || will_inline)
2959 return false;
2960
2961 /* If function is not being inlined, we care only about
2962 references outside of the comdat group. */
2963 if (!will_inline)
2964 for (int i = 0; next->iterate_referring (i, ref); i++)
2965 if (ref->referring->get_comdat_group () != get_comdat_group ())
2966 return false;
2967 }
2968 return true;
2969 }
2970
2971 /* Return true when function cgraph_node can be expected to be removed
2972 from program when direct calls in this compilation unit are removed.
2973
2974 As a special case COMDAT functions are
2975    cgraph_can_remove_if_no_direct_calls_p while they are not
2976    cgraph_only_called_directly_p (it is possible they are called from
2977    another unit).
2978
2979 This function behaves as cgraph_only_called_directly_p because eliminating
2980    all uses of a COMDAT function does not necessarily make it disappear from
2981    the program unless we are compiling the whole program or we do LTO.  In this
2982 case we know we win since dynamic linking will not really discard the
2983 linkonce section. */
2984
2985 bool
2986 cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
2987 (bool will_inline)
2988 {
2989 gcc_assert (!inlined_to);
2990 if (DECL_EXTERNAL (decl))
2991 return true;
2992
2993 if (!in_lto_p && !flag_whole_program)
2994 {
2995 /* If the symbol is in comdat group, we need to verify that whole comdat
2996 group becomes unreachable. Technically we could skip references from
2997 within the group, too. */
2998 if (!only_called_directly_p ())
2999 return false;
3000 if (same_comdat_group && externally_visible)
3001 {
3002 struct cgraph_node *target = ultimate_alias_target ();
3003
3004 if (will_inline && address_taken)
3005 return true;
3006 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
3007 next != this;
3008 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
3009 {
3010 if (!externally_visible)
3011 continue;
3012 if (!next->alias
3013 && !next->only_called_directly_p ())
3014 return false;
3015
3016 /* If we see different symbol than THIS,
3017 be sure to check calls. */
3018 if (next->ultimate_alias_target () != target)
3019 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
3020 if (e->caller->get_comdat_group () != get_comdat_group ()
3021 || will_inline)
3022 return false;
3023 }
3024 }
3025 return true;
3026 }
3027 else
3028 return can_remove_if_no_direct_calls_p (will_inline);
3029 }
3030
3031
3032 /* Worker for cgraph_only_called_directly_p. */
3033
3034 static bool
3035 cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
3036 {
3037 return !node->only_called_directly_or_aliased_p ();
3038 }
3039
3040 /* Return true when function cgraph_node and all its aliases are only called
3041    directly, i.e. it is not externally visible, its address is not taken
3042    and it is not used in any other non-standard way.  */
3044
3045 bool
3046 cgraph_node::only_called_directly_p (void)
3047 {
3048 gcc_assert (ultimate_alias_target () == this);
3049 return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
3050 NULL, true);
3051 }
3052
3053
3054 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
3055
3056 static bool
3057 collect_callers_of_node_1 (cgraph_node *node, void *data)
3058 {
3059 vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
3060 cgraph_edge *cs;
3061 enum availability avail;
3062 node->ultimate_alias_target (&avail);
3063
3064 if (avail > AVAIL_INTERPOSABLE)
3065 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
3066 if (!cs->indirect_inlining_edge
3067 && !cs->caller->thunk.thunk_p)
3068 redirect_callers->safe_push (cs);
3069 return false;
3070 }
3071
3072 /* Collect all callers of cgraph_node and its aliases that are known to lead to
3073 cgraph_node (i.e. are not overwritable). */
3074
3075 vec<cgraph_edge *>
3076 cgraph_node::collect_callers (void)
3077 {
3078 vec<cgraph_edge *> redirect_callers = vNULL;
3079 call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
3080 &redirect_callers, false);
3081 return redirect_callers;
3082 }
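
/* Illustrative use (hypothetical cloning code, a sketch rather than code
   from this file): collect the callers that are known to reach this node
   and redirect them to a freshly created clone NEW_CLONE:

     vec<cgraph_edge *> callers = node->collect_callers ();
     for (unsigned i = 0; i < callers.length (); i++)
       callers[i]->redirect_callee (new_clone);
     callers.release ();  */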
3083
3084
3085 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it.  Return
3086 optimistically true if this cannot be determined. */
3087
3088 static bool
3089 clone_of_p (cgraph_node *node, cgraph_node *node2)
3090 {
3091 node = node->ultimate_alias_target ();
3092 node2 = node2->ultimate_alias_target ();
3093
3094 if (node2->clone_of == node
3095 || node2->former_clone_of == node->decl)
3096 return true;
3097
3098 if (!node->thunk.thunk_p && !node->former_thunk_p ())
3099 {
3100 while (node2 && node->decl != node2->decl)
3101 node2 = node2->clone_of;
3102 return node2 != NULL;
3103 }
3104
3105 /* There are no virtual clones of thunks so check former_clone_of or if we
3106      might have skipped thunks because their this-adjustments are no longer
3107 necessary. */
3108 while (node->thunk.thunk_p || node->former_thunk_p ())
3109 {
3110 if (!node->thunk.this_adjusting)
3111 return false;
3112 /* In case of instrumented expanded thunks, which can have multiple calls
3113 in them, we do not know how to continue and just have to be
3114 optimistic. The same applies if all calls have already been inlined
3115 into the thunk. */
3116 if (!node->callees || node->callees->next_callee)
3117 return true;
3118 node = node->callees->callee->ultimate_alias_target ();
3119
3120 if (!node2->clone.param_adjustments
3121 || node2->clone.param_adjustments->first_param_intact_p ())
3122 return false;
3123 if (node2->former_clone_of == node->decl
3124 || node2->former_clone_of == node->former_clone_of)
3125 return true;
3126
3127 cgraph_node *n2 = node2;
3128 while (n2 && node->decl != n2->decl)
3129 n2 = n2->clone_of;
3130 if (n2)
3131 return true;
3132 }
3133
3134 return false;
3135 }
3136
3137 /* Verify edge count and frequency. */
3138
3139 bool
3140 cgraph_edge::verify_count ()
3141 {
3142 bool error_found = false;
3143 if (!count.verify ())
3144 {
3145 error ("caller edge count invalid");
3146 error_found = true;
3147 }
3148 return error_found;
3149 }
3150
3151 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
3152 static void
3153 cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
3154 {
3155 bool fndecl_was_null = false;
3156 /* debug_gimple_stmt needs correct cfun */
3157 if (cfun != this_cfun)
3158 set_cfun (this_cfun);
3159 /* ...and an actual current_function_decl */
3160 if (!current_function_decl)
3161 {
3162 current_function_decl = this_cfun->decl;
3163 fndecl_was_null = true;
3164 }
3165 debug_gimple_stmt (stmt);
3166 if (fndecl_was_null)
3167 current_function_decl = NULL;
3168 }
3169
3170 /* Verify that call graph edge corresponds to DECL from the associated
3171 statement. Return true if the verification should fail. */
3172
3173 bool
3174 cgraph_edge::verify_corresponds_to_fndecl (tree decl)
3175 {
3176 cgraph_node *node;
3177
3178 if (!decl || callee->inlined_to)
3179 return false;
3180 if (symtab->state == LTO_STREAMING)
3181 return false;
3182 node = cgraph_node::get (decl);
3183
3184 /* We do not know if a node from a different partition is an alias or what it
3185 aliases and therefore cannot do the former_clone_of check reliably. When
3186 body_removed is set, we have lost all information about what was alias or
3187 thunk of and also cannot proceed. */
3188 if (!node
3189 || node->body_removed
3190 || node->in_other_partition
3191 || callee->icf_merged
3192 || callee->in_other_partition)
3193 return false;
3194
3195 node = node->ultimate_alias_target ();
3196
3197 /* Optimizers can redirect unreachable calls or calls triggering undefined
3198 behavior to builtin_unreachable. */
3199
3200 if (fndecl_built_in_p (callee->decl, BUILT_IN_UNREACHABLE))
3201 return false;
3202
3203 if (callee->former_clone_of != node->decl
3204 && (node != callee->ultimate_alias_target ())
3205 && !clone_of_p (node, callee))
3206 return true;
3207 else
3208 return false;
3209 }
3210
3211 /* Disable warnings about missing quoting in GCC diagnostics for
3212 the verification errors. Their format strings don't follow GCC
3213 diagnostic conventions and the calls are ultimately followed by
3214 one to internal_error. */
3215 #if __GNUC__ >= 10
3216 # pragma GCC diagnostic push
3217 # pragma GCC diagnostic ignored "-Wformat-diag"
3218 #endif
3219
3220 /* Verify consistency of speculative call in NODE corresponding to STMT
3221 and LTO_STMT_UID. If INDIRECT is set, assume that it is the indirect
3222 edge of call sequence. Return true if error is found.
3223
3224    This function is called for every component of an indirect call (direct
3225    edges, indirect edge and refs).  To save duplicated work, full checking
3226    is done only when INDIRECT is non-NULL.  */
3227 static bool
3228 verify_speculative_call (struct cgraph_node *node, gimple *stmt,
3229 unsigned int lto_stmt_uid,
3230 struct cgraph_edge *indirect)
3231 {
3232 if (indirect == NULL)
3233 {
3234 for (indirect = node->indirect_calls; indirect;
3235 indirect = indirect->next_callee)
3236 if (indirect->call_stmt == stmt
3237 && indirect->lto_stmt_uid == lto_stmt_uid)
3238 break;
3239 if (!indirect)
3240 {
3241 error ("missing indirect call in speculative call sequence");
3242 return true;
3243 }
3244 if (!indirect->speculative)
3245 {
3246 error ("indirect call in speculative call sequence has no "
3247 "speculative flag");
3248 return true;
3249 }
3250 return false;
3251 }
3252
3253 /* Maximal number of targets. We probably will never want to have more than
3254 this. */
3255 const unsigned int num = 256;
3256 cgraph_edge *direct_calls[num];
3257 ipa_ref *refs[num];
3258
3259 for (unsigned int i = 0; i < num; i++)
3260 {
3261 direct_calls[i] = NULL;
3262 refs[i] = NULL;
3263 }
3264
3265 cgraph_edge *first_call = NULL;
3266 cgraph_edge *prev_call = NULL;
3267
3268 for (cgraph_edge *direct = node->callees; direct;
3269 direct = direct->next_callee)
3270 if (direct->call_stmt == stmt && direct->lto_stmt_uid == lto_stmt_uid)
3271 {
3272 if (!first_call)
3273 first_call = direct;
3274 if (prev_call && direct != prev_call->next_callee)
3275 {
3276 error ("speculative edges are not adjacent");
3277 return true;
3278 }
3279 prev_call = direct;
3280 if (!direct->speculative)
3281 {
3282 error ("direct call to %s in speculative call sequence has no "
3283 "speculative flag", direct->callee->dump_name ());
3284 return true;
3285 }
3286 if (direct->speculative_id >= num)
3287 {
3288 error ("direct call to %s in speculative call sequence has "
3289 "speculative_id %i out of range",
3290 direct->callee->dump_name (), direct->speculative_id);
3291 return true;
3292 }
3293 if (direct_calls[direct->speculative_id])
3294 {
3295 error ("duplicate direct call to %s in speculative call sequence "
3296 "with speculative_id %i",
3297 direct->callee->dump_name (), direct->speculative_id);
3298 return true;
3299 }
3300 direct_calls[direct->speculative_id] = direct;
3301 }
3302
3303 if (first_call->call_stmt
3304 && first_call != node->get_edge (first_call->call_stmt))
3305 {
3306 error ("call stmt hash does not point to first direct edge of "
3307 "speculative call sequence");
3308 return true;
3309 }
3310
3311 ipa_ref *ref;
3312 for (int i = 0; node->iterate_reference (i, ref); i++)
3313 if (ref->speculative
3314 && ref->stmt == stmt && ref->lto_stmt_uid == lto_stmt_uid)
3315 {
3316 if (ref->speculative_id >= num)
3317 {
3318 error ("direct call to %s in speculative call sequence has "
3319 "speculative_id %i out of range",
3320 ref->referred->dump_name (), ref->speculative_id);
3321 return true;
3322 }
3323 if (refs[ref->speculative_id])
3324 {
3325 error ("duplicate reference %s in speculative call sequence "
3326 "with speculative_id %i",
3327 ref->referred->dump_name (), ref->speculative_id);
3328 return true;
3329 }
3330 refs[ref->speculative_id] = ref;
3331 }
3332
3333 int num_targets = 0;
3334 for (unsigned int i = 0 ; i < num ; i++)
3335 {
3336 if (refs[i] && !direct_calls[i])
3337 {
3338 error ("missing direct call for speculation %i", i);
3339 return true;
3340 }
3341 if (!refs[i] && direct_calls[i])
3342 {
3343 error ("missing ref for speculation %i", i);
3344 return true;
3345 }
3346 if (refs[i] != NULL)
3347 num_targets++;
3348 }
3349
3350 if (num_targets != indirect->num_speculative_call_targets_p ())
3351 {
3352 error ("number of speculative targets %i mismatched with "
3353 "num_speculative_call_targets %i",
3354 num_targets,
3355 indirect->num_speculative_call_targets_p ());
3356 return true;
3357 }
3358 return false;
3359 }
3360
3361 /* Verify cgraph nodes of given cgraph node. */
3362 DEBUG_FUNCTION void
3363 cgraph_node::verify_node (void)
3364 {
3365 cgraph_edge *e;
3366 function *this_cfun = DECL_STRUCT_FUNCTION (decl);
3367 basic_block this_block;
3368 gimple_stmt_iterator gsi;
3369 bool error_found = false;
3370 int i;
3371 ipa_ref *ref = NULL;
3372
3373 if (seen_error ())
3374 return;
3375
3376 timevar_push (TV_CGRAPH_VERIFY);
3377 error_found |= verify_base ();
3378 for (e = callees; e; e = e->next_callee)
3379 if (e->aux)
3380 {
3381 error ("aux field set for edge %s->%s",
3382 identifier_to_locale (e->caller->name ()),
3383 identifier_to_locale (e->callee->name ()));
3384 error_found = true;
3385 }
3386 if (!count.verify ())
3387 {
3388 error ("cgraph count invalid");
3389 error_found = true;
3390 }
3391 if (inlined_to && same_comdat_group)
3392 {
3393 error ("inline clone in same comdat group list");
3394 error_found = true;
3395 }
3396 if (inlined_to && !count.compatible_p (inlined_to->count))
3397 {
3398 error ("inline clone count is not compatible");
3399 count.debug ();
3400 inlined_to->count.debug ();
3401 error_found = true;
3402 }
3403 if (tp_first_run < 0)
3404 {
3405 error ("tp_first_run must be non-negative");
3406 error_found = true;
3407 }
3408 if (!definition && !in_other_partition && local)
3409 {
3410 error ("local symbols must be defined");
3411 error_found = true;
3412 }
3413 if (inlined_to && externally_visible)
3414 {
3415 error ("externally visible inline clone");
3416 error_found = true;
3417 }
3418 if (inlined_to && address_taken)
3419 {
3420 error ("inline clone with address taken");
3421 error_found = true;
3422 }
3423 if (inlined_to && force_output)
3424 {
3425 error ("inline clone is forced to output");
3426 error_found = true;
3427 }
3428 if (symtab->state != LTO_STREAMING)
3429 {
3430 if (calls_comdat_local && !same_comdat_group)
3431 {
3432 error ("calls_comdat_local is set outside of a comdat group");
3433 error_found = true;
3434 }
3435 if (!inlined_to && calls_comdat_local != check_calls_comdat_local_p ())
3436 {
3437 error ("invalid calls_comdat_local flag");
3438 error_found = true;
3439 }
3440 }
3441 if (DECL_IS_MALLOC (decl)
3442 && !POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
3443 {
3444 error ("malloc attribute should be used for a function that "
3445 "returns a pointer");
3446 error_found = true;
3447 }
3448 for (e = indirect_calls; e; e = e->next_callee)
3449 {
3450 if (e->aux)
3451 {
3452 error ("aux field set for indirect edge from %s",
3453 identifier_to_locale (e->caller->name ()));
3454 error_found = true;
3455 }
3456 if (!e->count.compatible_p (count))
3457 {
3458 error ("edge count is not compatible with function count");
3459 e->count.debug ();
3460 count.debug ();
3461 error_found = true;
3462 }
3463 if (!e->indirect_unknown_callee
3464 || !e->indirect_info)
3465 {
3466 error ("An indirect edge from %s is not marked as indirect or has "
3467 "associated indirect_info, the corresponding statement is: ",
3468 identifier_to_locale (e->caller->name ()));
3469 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3470 error_found = true;
3471 }
3472 if (e->call_stmt && e->lto_stmt_uid)
3473 {
3474 error ("edge has both call_stmt and lto_stmt_uid set");
3475 error_found = true;
3476 }
3477 }
3478 bool check_comdat = comdat_local_p ();
3479 for (e = callers; e; e = e->next_caller)
3480 {
3481 if (e->verify_count ())
3482 error_found = true;
3483 if (check_comdat
3484 && !in_same_comdat_group_p (e->caller))
3485 {
3486 error ("comdat-local function called by %s outside its comdat",
3487 identifier_to_locale (e->caller->name ()));
3488 error_found = true;
3489 }
3490 if (!e->inline_failed)
3491 {
3492 if (inlined_to
3493 != (e->caller->inlined_to
3494 ? e->caller->inlined_to : e->caller))
3495 {
3496 error ("inlined_to pointer is wrong");
3497 error_found = true;
3498 }
3499 if (callers->next_caller)
3500 {
3501 error ("multiple inline callers");
3502 error_found = true;
3503 }
3504 }
3505 else
3506 if (inlined_to)
3507 {
3508 error ("inlined_to pointer set for noninline callers");
3509 error_found = true;
3510 }
3511 }
3512 for (e = callees; e; e = e->next_callee)
3513 {
3514 if (e->verify_count ())
3515 error_found = true;
3516 if (!e->count.compatible_p (count))
3517 {
3518 error ("edge count is not compatible with function count");
3519 e->count.debug ();
3520 count.debug ();
3521 error_found = true;
3522 }
3523 if (gimple_has_body_p (e->caller->decl)
3524 && !e->caller->inlined_to
3525 && !e->speculative
3526 /* Optimized out calls are redirected to __builtin_unreachable. */
3527 && (e->count.nonzero_p ()
3528 || ! e->callee->decl
3529 || !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
3530 && count
3531 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3532 && (!e->count.ipa_p ()
3533 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3534 {
3535 error ("caller edge count does not match BB count");
3536 fprintf (stderr, "edge count: ");
3537 e->count.dump (stderr);
3538 fprintf (stderr, "\n bb count: ");
3539 gimple_bb (e->call_stmt)->count.dump (stderr);
3540 fprintf (stderr, "\n");
3541 error_found = true;
3542 }
3543 if (e->call_stmt && e->lto_stmt_uid)
3544 {
3545 error ("edge has both call_stmt and lto_stmt_uid set");
3546 error_found = true;
3547 }
3548 if (e->speculative
3549 && verify_speculative_call (e->caller, e->call_stmt, e->lto_stmt_uid,
3550 NULL))
3551 error_found = true;
3552 }
3553 for (e = indirect_calls; e; e = e->next_callee)
3554 {
3555 if (e->verify_count ())
3556 error_found = true;
3557 if (gimple_has_body_p (e->caller->decl)
3558 && !e->caller->inlined_to
3559 && !e->speculative
3561 && count
3562 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3563 && (!e->count.ipa_p ()
3564 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3565 {
3566 error ("indirect call count does not match BB count");
3567 fprintf (stderr, "edge count: ");
3568 e->count.dump (stderr);
3569 fprintf (stderr, "\n bb count: ");
3570 gimple_bb (e->call_stmt)->count.dump (stderr);
3571 fprintf (stderr, "\n");
3572 error_found = true;
3573 }
3574 if (e->speculative
3575 && verify_speculative_call (e->caller, e->call_stmt, e->lto_stmt_uid,
3576 e))
3577 error_found = true;
3578 }
3579 for (i = 0; iterate_reference (i, ref); i++)
3580 {
3581 if (ref->stmt && ref->lto_stmt_uid)
3582 {
3583 error ("reference has both stmt and lto_stmt_uid set");
3584 error_found = true;
3585 }
3586 if (ref->speculative
3587 && verify_speculative_call (this, ref->stmt,
3588 ref->lto_stmt_uid, NULL))
3589 error_found = true;
3590 }
3591
3592 if (!callers && inlined_to)
3593 {
3594 error ("inlined_to pointer is set but no predecessors found");
3595 error_found = true;
3596 }
3597 if (inlined_to == this)
3598 {
3599 error ("inlined_to pointer refers to itself");
3600 error_found = true;
3601 }
3602
3603 if (clone_of)
3604 {
3605 cgraph_node *first_clone = clone_of->clones;
3606 if (first_clone != this)
3607 {
3608 if (prev_sibling_clone->clone_of != clone_of)
3609 {
3610 error ("cgraph_node has wrong clone_of");
3611 error_found = true;
3612 }
3613 }
3614 }
3615 if (clones)
3616 {
3617 cgraph_node *n;
3618 for (n = clones; n; n = n->next_sibling_clone)
3619 if (n->clone_of != this)
3620 break;
3621 if (n)
3622 {
3623 error ("cgraph_node has wrong clone list");
3624 error_found = true;
3625 }
3626 }
3627 if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3628 {
3629       error ("cgraph_node is in the clone list but it is not a clone");
3630 error_found = true;
3631 }
3632 if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3633 {
3634 error ("cgraph_node has wrong prev_clone pointer");
3635 error_found = true;
3636 }
3637 if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3638 {
3639 error ("double linked list of clones corrupted");
3640 error_found = true;
3641 }
3642
3643 if (analyzed && alias)
3644 {
3645 bool ref_found = false;
3646 int i;
3647 ipa_ref *ref = NULL;
3648
3649 if (callees)
3650 {
3651 error ("Alias has call edges");
3652 error_found = true;
3653 }
3654 for (i = 0; iterate_reference (i, ref); i++)
3655 if (ref->use != IPA_REF_ALIAS)
3656 {
3657 error ("Alias has non-alias reference");
3658 error_found = true;
3659 }
3660 else if (ref_found)
3661 {
3662 error ("Alias has more than one alias reference");
3663 error_found = true;
3664 }
3665 else
3666 ref_found = true;
3667 if (!ref_found)
3668 {
3669 error ("Analyzed alias has no reference");
3670 error_found = true;
3671 }
3672 }
3673
3674 if (analyzed && thunk.thunk_p)
3675 {
3676 if (!callees)
3677 {
3678 error ("No edge out of thunk node");
3679 error_found = true;
3680 }
3681 else if (callees->next_callee)
3682 {
3683 error ("More than one edge out of thunk node");
3684 error_found = true;
3685 }
3686 if (gimple_has_body_p (decl) && !inlined_to)
3687 {
3688 	  error ("Thunk is not supposed to have a body");
3689 error_found = true;
3690 }
3691 }
3692 else if (analyzed && gimple_has_body_p (decl)
3693 && !TREE_ASM_WRITTEN (decl)
3694 && (!DECL_EXTERNAL (decl) || inlined_to)
3695 && !flag_wpa)
3696 {
3697 if (this_cfun->cfg)
3698 {
3699 hash_set<gimple *> stmts;
3700
3701 /* Reach the trees by walking over the CFG, and note the
3702 enclosing basic-blocks in the call edges. */
3703 FOR_EACH_BB_FN (this_block, this_cfun)
3704 {
3705 for (gsi = gsi_start_phis (this_block);
3706 !gsi_end_p (gsi); gsi_next (&gsi))
3707 stmts.add (gsi_stmt (gsi));
3708 for (gsi = gsi_start_bb (this_block);
3709 !gsi_end_p (gsi);
3710 gsi_next (&gsi))
3711 {
3712 gimple *stmt = gsi_stmt (gsi);
3713 stmts.add (stmt);
3714 if (is_gimple_call (stmt))
3715 {
3716 cgraph_edge *e = get_edge (stmt);
3717 tree decl = gimple_call_fndecl (stmt);
3718 if (e)
3719 {
3720 if (e->aux)
3721 {
3722 error ("shared call_stmt:");
3723 cgraph_debug_gimple_stmt (this_cfun, stmt);
3724 error_found = true;
3725 }
3726 if (!e->indirect_unknown_callee)
3727 {
3728 if (e->verify_corresponds_to_fndecl (decl))
3729 {
3730 error ("edge points to wrong declaration:");
3731 debug_tree (e->callee->decl);
3732 fprintf (stderr," Instead of:");
3733 debug_tree (decl);
3734 error_found = true;
3735 }
3736 }
3737 else if (decl)
3738 {
3739 error ("an indirect edge with unknown callee "
3740 "corresponding to a call_stmt with "
3741 "a known declaration:");
3742 error_found = true;
3743 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3744 }
3745 e->aux = (void *)1;
3746 }
3747 else if (decl)
3748 {
3749 error ("missing callgraph edge for call stmt:");
3750 cgraph_debug_gimple_stmt (this_cfun, stmt);
3751 error_found = true;
3752 }
3753 }
3754 }
3755 }
3756 for (i = 0; iterate_reference (i, ref); i++)
3757 if (ref->stmt && !stmts.contains (ref->stmt))
3758 {
3759 error ("reference to dead statement");
3760 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3761 error_found = true;
3762 }
3763 }
3764 else
3765 /* No CFG available?! */
3766 gcc_unreachable ();
3767
3768 for (e = callees; e; e = e->next_callee)
3769 {
3770 if (!e->aux && !e->speculative)
3771 {
3772 error ("edge %s->%s has no corresponding call_stmt",
3773 identifier_to_locale (e->caller->name ()),
3774 identifier_to_locale (e->callee->name ()));
3775 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3776 error_found = true;
3777 }
3778 e->aux = 0;
3779 }
3780 for (e = indirect_calls; e; e = e->next_callee)
3781 {
3782 if (!e->aux && !e->speculative)
3783 {
3784 error ("an indirect edge from %s has no corresponding call_stmt",
3785 identifier_to_locale (e->caller->name ()));
3786 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3787 error_found = true;
3788 }
3789 e->aux = 0;
3790 }
3791 }
3792
3793 if (nested != NULL)
3794 {
3795 for (cgraph_node *n = nested; n != NULL; n = n->next_nested)
3796 {
3797 if (n->origin == NULL)
3798 {
3799 error ("missing origin for a node in a nested list");
3800 error_found = true;
3801 }
3802 else if (n->origin != this)
3803 {
3804 error ("origin points to a different parent");
3805 error_found = true;
3806 break;
3807 }
3808 }
3809 }
3810 if (next_nested != NULL && origin == NULL)
3811 {
3812 error ("missing origin for a node in a nested list");
3813 error_found = true;
3814 }
3815
3816 if (error_found)
3817 {
3818 dump (stderr);
3819 internal_error ("verify_cgraph_node failed");
3820 }
3821 timevar_pop (TV_CGRAPH_VERIFY);
3822 }
3823
3824 /* Verify whole cgraph structure. */
3825 DEBUG_FUNCTION void
3826 cgraph_node::verify_cgraph_nodes (void)
3827 {
3828 cgraph_node *node;
3829
3830 if (seen_error ())
3831 return;
3832
3833 FOR_EACH_FUNCTION (node)
3834 node->verify ();
3835 }
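/* Usage sketch (illustrative only; not taken from this file's callers): the
   whole-graph verifier is normally reached only when internal checking is
   enabled.  The flag_checking guard below is an assumption about the call
   site, not something this function enforces itself.

       if (flag_checking)
	 cgraph_node::verify_cgraph_nodes ();
*/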
3836
3837 #if __GNUC__ >= 10
3838 # pragma GCC diagnostic pop
3839 #endif
3840
3841 /* Walk the alias chain to return the function this cgraph_node is an alias of.
3842    Walk through thunks, too.
3843    When AVAILABILITY is non-NULL, get the minimal availability in the chain.
3844    When REF is non-NULL, assume that the reference happens in symbol REF
3845    when determining the availability. */
3846
3847 cgraph_node *
3848 cgraph_node::function_symbol (enum availability *availability,
3849 struct symtab_node *ref)
3850 {
3851 cgraph_node *node = ultimate_alias_target (availability, ref);
3852
3853 while (node->thunk.thunk_p)
3854 {
3855 enum availability a;
3856
3857 ref = node;
3858 node = node->callees->callee;
3859 node = node->ultimate_alias_target (availability ? &a : NULL, ref);
3860 if (availability && a < *availability)
3861 *availability = a;
3862 }
3863 return node;
3864 }
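/* Usage sketch (illustrative only): resolve an edge's callee to the underlying
   function and act only when the body cannot be interposed.  The edge E and
   the AVAIL_AVAILABLE test are call-site assumptions, not logic from this
   file.

       enum availability avail;
       cgraph_node *target = e->callee->function_symbol (&avail);
       if (avail >= AVAIL_AVAILABLE)
	 ... the body of TARGET is the one that will actually execute ...
*/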
3865
3866 /* Walk the alias chain to return the function this cgraph_node is an alias of.
3867    Walk through non-virtual thunks, too.  Thus we return either a function
3868    or a virtual thunk node.
3869    When AVAILABILITY is non-NULL, get the minimal availability in the chain.
3870    When REF is non-NULL, assume that the reference happens in symbol REF
3871    when determining the availability. */
3872
3873 cgraph_node *
3874 cgraph_node::function_or_virtual_thunk_symbol
3875 (enum availability *availability,
3876 struct symtab_node *ref)
3877 {
3878 cgraph_node *node = ultimate_alias_target (availability, ref);
3879
3880 while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
3881 {
3882 enum availability a;
3883
3884 ref = node;
3885 node = node->callees->callee;
3886 node = node->ultimate_alias_target (availability ? &a : NULL, ref);
3887 if (availability && a < *availability)
3888 *availability = a;
3889 }
3890 return node;
3891 }
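/* Usage sketch (illustrative only): when redirecting a call it can be useful
   to stop the walk at virtual thunks so the this-pointer adjustment is
   preserved.  The edge E and the availability handling are assumptions about
   the caller.

       enum availability avail;
       cgraph_node *target
	 = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
       if (avail > AVAIL_INTERPOSABLE)
	 e->redirect_callee (target);
*/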
3892
3893 /* When doing LTO, read cgraph_node's body from disk if it is not already
3894 present. */
3895
3896 bool
3897 cgraph_node::get_untransformed_body (void)
3898 {
3899 lto_file_decl_data *file_data;
3900 const char *data, *name;
3901 size_t len;
3902 tree decl = this->decl;
3903
3904   /* Check if the body is already there.  Either we have a gimple body, or
3905      the function is a thunk, in which case DECL_ARGUMENTS is set. */
3906 if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
3907 return false;
3908
3909 gcc_assert (in_lto_p && !DECL_RESULT (decl));
3910
3911 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3912
3913 file_data = lto_file_data;
3914 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3915
3916 /* We may have renamed the declaration, e.g., a static function. */
3917 name = lto_get_decl_name_mapping (file_data, name);
3918 struct lto_in_decl_state *decl_state
3919 = lto_get_function_in_decl_state (file_data, decl);
3920
3921 cgraph_node *origin = this;
3922 while (origin->clone_of)
3923 origin = origin->clone_of;
3924
3925 int stream_order = origin->order - file_data->order_base;
3926 data = lto_get_section_data (file_data, LTO_section_function_body,
3927 name, stream_order, &len,
3928 decl_state->compressed);
3929 if (!data)
3930 fatal_error (input_location, "%s: section %s.%d is missing",
3931 file_data->file_name, name, stream_order);
3932
3933 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3934
3935 if (!quiet_flag)
3936 fprintf (stderr, " in:%s", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
3937 lto_input_function_body (file_data, this, data);
3938 lto_stats.num_function_bodies++;
3939 lto_free_section_data (file_data, LTO_section_function_body, name,
3940 data, len, decl_state->compressed);
3941 lto_free_function_in_decl_state_for_node (this);
3942   /* Keep lto file data so ipa-inline-analysis knows about cross-module
3943      inlining. */
3944
3945 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3946
3947 return true;
3948 }
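/* Usage sketch (illustrative only): during LTO materialization each defined
   function with a body is read back in before IPA transforms run.  The loop
   and guards below are assumptions about the caller, not logic from this
   file.

       cgraph_node *node;
       FOR_EACH_DEFINED_FUNCTION (node)
	 if (node->has_gimple_body_p () && !node->clone_of)
	   node->get_untransformed_body ();
*/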
3949
3950 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
3951 if it is not already present. When some IPA transformations are scheduled,
3952 apply them. */
3953
3954 bool
3955 cgraph_node::get_body (void)
3956 {
3957 bool updated;
3958
3959 updated = get_untransformed_body ();
3960
3961   /* Getting a transformed body makes no sense for inline clones;
3962      we should never use this on real clones because they are materialized
3963      early.
3964      TODO: Materializing clones here will likely lead to a smaller LTRANS
3965      footprint. */
3966 gcc_assert (!inlined_to && !clone_of);
3967 if (ipa_transforms_to_apply.exists ())
3968 {
3969 opt_pass *saved_current_pass = current_pass;
3970 FILE *saved_dump_file = dump_file;
3971 const char *saved_dump_file_name = dump_file_name;
3972 dump_flags_t saved_dump_flags = dump_flags;
3973 dump_file_name = NULL;
3974 set_dump_file (NULL);
3975
3976 push_cfun (DECL_STRUCT_FUNCTION (decl));
3977
3978 update_ssa (TODO_update_ssa_only_virtuals);
3979 execute_all_ipa_transforms (true);
3980 cgraph_edge::rebuild_edges ();
3981 free_dominance_info (CDI_DOMINATORS);
3982 free_dominance_info (CDI_POST_DOMINATORS);
3983 pop_cfun ();
3984 updated = true;
3985
3986 current_pass = saved_current_pass;
3987 set_dump_file (saved_dump_file);
3988 dump_file_name = saved_dump_file_name;
3989 dump_flags = saved_dump_flags;
3990 }
3991 return updated;
3992 }
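/* Usage sketch (illustrative only): before a function is expanded to RTL its
   final, transformed body is requested.  The assert mirrors the constraint
   documented above that inline clones and clones must not reach this path;
   the surrounding call site is hypothetical.

       gcc_checking_assert (!node->inlined_to && !node->clone_of);
       node->get_body ();
*/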
3993
3994 /* Return the DECL_STRUCT_FUNCTION of the function; for clones without a
     body, fall back to the node they were cloned from. */
3995
3996 struct function *
3997 cgraph_node::get_fun () const
3998 {
3999 const cgraph_node *node = this;
4000 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
4001
4002 while (!fun && node->clone_of)
4003 {
4004 node = node->clone_of;
4005 fun = DECL_STRUCT_FUNCTION (node->decl);
4006 }
4007
4008 return fun;
4009 }
4010
4011 /* Reset all state within cgraph.c so that we can rerun the compiler
4012 within the same process. For use by toplev::finalize. */
4013
4014 void
4015 cgraph_c_finalize (void)
4016 {
4017 symtab = NULL;
4018
4019 x_cgraph_nodes_queue = NULL;
4020
4021 cgraph_fnver_htab = NULL;
4022 version_info_node = NULL;
4023 }
4024
4025 /* A worker for call_for_symbol_and_aliases. */
4026
4027 bool
4028 cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
4029 void *),
4030 void *data,
4031 bool include_overwritable)
4032 {
4033 ipa_ref *ref;
4034 FOR_EACH_ALIAS (this, ref)
4035 {
4036 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
4037 if (include_overwritable
4038 || alias->get_availability () > AVAIL_INTERPOSABLE)
4039 if (alias->call_for_symbol_and_aliases (callback, data,
4040 include_overwritable))
4041 return true;
4042 }
4043 return false;
4044 }
4045
4046 /* Return true if NODE is called through a thunk, i.e., some caller of NODE is a thunk. */
4047
4048 bool
4049 cgraph_node::has_thunk_p (cgraph_node *node, void *)
4050 {
4051 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
4052 if (e->caller->thunk.thunk_p)
4053 return true;
4054 return false;
4055 }
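/* Usage sketch (illustrative only): has_thunk_p follows the callback
   signature expected by call_for_symbol_and_aliases, so a caller can test
   whether a function or any of its aliases is reached through a thunk.  The
   surrounding call site is an assumption.

       bool through_thunk
	 = node->call_for_symbol_and_aliases (cgraph_node::has_thunk_p,
					      NULL, true);
*/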
4056
4057 /* Expected frequency of executions of this call per invocation of its caller, i.e., the edge count scaled by the caller's count, as an sreal. */
4058
4059 sreal
4060 cgraph_edge::sreal_frequency ()
4061 {
4062 return count.to_sreal_scale (caller->inlined_to
4063 ? caller->inlined_to->count
4064 : caller->count);
4065 }
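/* Usage sketch (illustrative only): the returned sreal is the expected number
   of executions of the call per invocation of the caller.  The 0.5 threshold
   below is an arbitrary example value, not one used by this file.

       sreal freq = e->sreal_frequency ();
       bool frequent_within_caller = freq.to_double () >= 0.5;
*/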
4066
4067
4068 /* During LTO stream-in, this can be used to check whether the call can
4069    possibly be internal to the current translation unit. */
4070
4071 bool
4072 cgraph_edge::possibly_call_in_translation_unit_p (void)
4073 {
4074 gcc_checking_assert (in_lto_p && caller->prevailing_p ());
4075
4076 /* While incremental linking we may end up getting function body later. */
4077 if (flag_incremental_link == INCREMENTAL_LINK_LTO)
4078 return true;
4079
4080   /* We could be smarter here and avoid streaming in indirect calls we cannot
4081      track, but that would require arranging to stream the indirect call
4082      summary first. */
4083 if (!callee)
4084 return true;
4085
4086 /* If callee is local to the original translation unit, it will be
4087 defined. */
4088 if (!TREE_PUBLIC (callee->decl) && !DECL_EXTERNAL (callee->decl))
4089 return true;
4090
4091   /* Otherwise we need to look up the prevailing symbol (the symbol table is
4092      not merged yet) and see if it is a definition.  In fact we may also
4093      resolve aliases, but that is probably not too important. */
4094 symtab_node *node = callee;
4095 for (int n = 10; node->previous_sharing_asm_name && n ; n--)
4096 node = node->previous_sharing_asm_name;
4097 if (node->previous_sharing_asm_name)
4098 node = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (callee->decl));
4099 gcc_assert (TREE_PUBLIC (node->decl));
4100 return node->get_availability () >= AVAIL_INTERPOSABLE;
4101 }
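/* Usage sketch (illustrative only): during LTO stream-in a pass may skip
   local analysis for edges whose callee certainly prevails in another unit.
   The loop below is an assumption about the caller; the function itself only
   answers the question.

       for (cgraph_edge *e = node->callees; e; e = e->next_callee)
	 {
	   if (!e->possibly_call_in_translation_unit_p ())
	     continue;
	   ... analyze the edge; the callee may be defined in this unit ...
	 }
*/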
4102
4103 /* Return the number of speculative call targets of this edge. */
4104
4105 int
4106 cgraph_edge::num_speculative_call_targets_p (void)
4107 {
4108 return indirect_info ? indirect_info->num_speculative_call_targets : 0;
4109 }
4110
4111 /* Check whether the function calls a comdat-local symbol.  This is used to
4112    recompute the calls_comdat_local flag after function transformations. */
4113 bool
4114 cgraph_node::check_calls_comdat_local_p ()
4115 {
4116 for (cgraph_edge *e = callees; e; e = e->next_callee)
4117 if (e->inline_failed
4118 ? e->callee->comdat_local_p ()
4119 : e->callee->check_calls_comdat_local_p ())
4120 return true;
4121 return false;
4122 }
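/* Usage sketch (illustrative only): after a transformation that redirects or
   removes call edges, the cached flag can be refreshed from scratch, which is
   what the verifier above cross-checks.  The call site shown is an
   assumption.

       node->calls_comdat_local = node->check_calls_comdat_local_p ();
*/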
4123
4124 /* A stashed copy of "symtab" for use by selftest::symbol_table_test.
4125 This needs to be a global so that it can be a GC root, and thus
4126 prevent the stashed copy from being garbage-collected if the GC runs
4127 during a symbol_table_test. */
4128
4129 symbol_table *saved_symtab;
4130
4131 #if CHECKING_P
4132
4133 namespace selftest {
4134
4135 /* class selftest::symbol_table_test. */
4136
4137 /* Constructor. Store the old value of symtab, and create a new one. */
4138
4139 symbol_table_test::symbol_table_test ()
4140 {
4141 gcc_assert (saved_symtab == NULL);
4142 saved_symtab = symtab;
4143 symtab = new (ggc_alloc<symbol_table> ()) symbol_table ();
4144 }
4145
4146 /* Destructor. Restore the old value of symtab. */
4147
4148 symbol_table_test::~symbol_table_test ()
4149 {
4150 gcc_assert (saved_symtab != NULL);
4151 symtab = saved_symtab;
4152 saved_symtab = NULL;
4153 }
4154
4155 /* Verify that symbol_table_test works. */
4156
4157 static void
4158 test_symbol_table_test ()
4159 {
4160 /* Simulate running two selftests involving symbol tables. */
4161 for (int i = 0; i < 2; i++)
4162 {
4163 symbol_table_test stt;
4164 tree test_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
4165 get_identifier ("test_decl"),
4166 build_function_type_list (void_type_node,
4167 NULL_TREE));
4168 cgraph_node *node = cgraph_node::get_create (test_decl);
4169 gcc_assert (node);
4170
4171 /* Verify that the node has order 0 on both iterations,
4172 and thus that nodes have predictable dump names in selftests. */
4173 ASSERT_EQ (node->order, 0);
4174 ASSERT_STREQ (node->dump_name (), "test_decl/0");
4175 }
4176 }
4177
4178 /* Run all of the selftests within this file. */
4179
4180 void
4181 cgraph_c_tests ()
4182 {
4183 test_symbol_table_test ();
4184 }
4185
4186 } // namespace selftest
4187
4188 #endif /* CHECKING_P */
4189
4190 #include "gt-cgraph.h"
4191