/* Callgraph clones
   Copyright (C) 2003-2017 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module provides facilities for cloning functions, i.e. creating
   new functions based on existing functions with simple modifications,
   such as replacement of parameters.

   To allow whole program optimization without the actual presence of
   function bodies, an additional infrastructure is provided for so-called
   virtual clones.

   A virtual clone in the callgraph is a function that has no
   associated body, just a description of how to create its body based
   on a different function (which itself may be a virtual clone).

   The description of function modifications includes adjustments to
   the function's signature (which allows, for example, removing or
   adding function arguments), substitutions to perform on the
   function body, and, for inlined functions, a pointer to the
   function that it will be inlined into.

   It is also possible to redirect any edge of the callgraph from a
   function to its virtual clone.  This implies updating the call
   site to adjust for the new function signature.

   Most of the transformations performed by inter-procedural
   optimizations can be represented via virtual clones.  For
   instance, a constant propagation pass can produce a virtual clone
   of a function which replaces one of its arguments by a
   constant.  The inliner can represent its decisions by producing a
   clone of a function whose body will later be integrated into
   a given function.

   Using virtual clones, the program can be easily updated
   during the Execute stage, solving most of the pass interaction
   problems that would otherwise occur during Transform.

   Virtual clones are later materialized in the LTRANS stage and
   turned into real functions.  Passes executed after the virtual
   clone was introduced also perform their Transform stage
   on the new functions, so for a pass there is no significant
   difference between operating on a real function or on a virtual
   clone introduced before its Execute stage.

   Optimization passes then work on virtual clones introduced before
   their Execute stage as if they were real functions.  The
   only difference is that clones are not visible during the
   Generate Summary stage.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "stringpool.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "tree-eh.h"
#include "tree-cfg.h"
#include "tree-inline.h"
#include "tree-dump.h"
#include "gimple-pretty-print.h"

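/* Illustrative sketch (not part of GCC): how a hypothetical IPA pass could
   build the description of the constant-propagation clone mentioned in the
   comment above.  The node, the PARM_DECL "parm" (assumed to be the second
   parameter), the constant 42 and the "constprop_example" suffix are all
   made up; create_virtual_clone and ipa_replace_map are the interfaces
   defined later in this file and in cgraph.h.

     cgraph_node *node = cgraph_node::get (fndecl);
     vec<cgraph_edge *> callers = node->collect_callers ();

     // Describe "replace the parameter PARM by the constant 42".
     ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     map->old_tree = parm;
     map->new_tree = build_int_cst (integer_type_node, 42);
     map->replace_p = true;
     map->ref_p = false;

     vec<ipa_replace_map *, va_gc> *tree_map = NULL;
     vec_safe_push (tree_map, map);

     // Drop the now-constant second parameter (index 1) from the clone's
     // signature.
     bitmap args_to_skip = BITMAP_GGC_ALLOC ();
     bitmap_set_bit (args_to_skip, 1);

     cgraph_node *clone
       = node->create_virtual_clone (callers, tree_map, args_to_skip,
                                     "constprop_example");

   No body is created at this point; the description is only applied when
   the clone is materialized (see materialize_all_clones at the end of this
   file).  */
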
/* Create a clone of this edge in callgraph node N, associated with the
   call statement CALL_STMT.  */

cgraph_edge *
cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
                    gcov_type count_scale, int freq_scale, bool update_original)
{
  cgraph_edge *new_edge;
  gcov_type gcov_count = apply_probability (count, count_scale);
  gcov_type freq;

  /* We do not want to ignore loop nest after frequency drops to 0.  */
  if (!freq_scale)
    freq_scale = 1;
  freq = frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
  if (freq > CGRAPH_FREQ_MAX)
    freq = CGRAPH_FREQ_MAX;

  if (indirect_unknown_callee)
    {
      tree decl;

      if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
          /* When the call is speculative, we need to resolve it
             via cgraph_resolve_speculation and not here.  */
          && !speculative)
        {
          cgraph_node *callee = cgraph_node::get (decl);
          gcc_checking_assert (callee);
          new_edge = n->create_edge (callee, call_stmt, gcov_count, freq);
        }
      else
        {
          new_edge = n->create_indirect_edge (call_stmt,
                                              indirect_info->ecf_flags,
                                              count, freq, false);
          *new_edge->indirect_info = *indirect_info;
        }
    }
  else
    {
      new_edge = n->create_edge (callee, call_stmt, gcov_count, freq);
      if (indirect_info)
        {
          new_edge->indirect_info
            = ggc_cleared_alloc<cgraph_indirect_call_info> ();
          *new_edge->indirect_info = *indirect_info;
        }
    }

  new_edge->inline_failed = inline_failed;
  new_edge->indirect_inlining_edge = indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = can_throw_external;
  new_edge->call_stmt_cannot_inline_p = call_stmt_cannot_inline_p;
  new_edge->speculative = speculative;
  new_edge->in_polymorphic_cdtor = in_polymorphic_cdtor;
  if (update_original)
    {
      count -= new_edge->count;
      if (count < 0)
        count = 0;
    }
  symtab->call_edge_duplication_hooks (this, new_edge);
  return new_edge;
}

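/* Worked example for the scaling above (values are illustrative and assume
   the usual CGRAPH_FREQ_BASE of 1000 and REG_BR_PROB_BASE of 10000):

     count = 600, count_scale = 5000 (i.e. 50%)
       => gcov_count = apply_probability (600, 5000) = 300
     frequency = 2000, freq_scale = 500
       => freq = 2000 * 500 / 1000 = 1000 == CGRAPH_FREQ_BASE

   With UPDATE_ORIGINAL, the original edge's count is then reduced by the
   new edge's count, so both edges together still account for roughly the
   original 600 executions.  */
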
/* Build a variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.  */

tree
cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
                                      bool skip_return)
{
  tree new_type = NULL;
  tree args, new_args = NULL;
  tree new_reversed;
  int i = 0;

  for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
       args = TREE_CHAIN (args), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);

  new_reversed = nreverse (new_args);
  if (args)
    {
      if (new_reversed)
        TREE_CHAIN (new_args) = void_list_node;
      else
        new_reversed = void_list_node;
    }

  /* Use copy_node to preserve as much as possible from the original type
     (debug info, attribute lists etc.).
     The exception is METHOD_TYPEs, which must have a THIS argument.
     When we are asked to remove it, we need to build a new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || !args_to_skip
      || !bitmap_bit_p (args_to_skip, 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
        = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
                                                         new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
    }

  if (skip_return)
    TREE_TYPE (new_type) = void_type_node;

  return new_type;
}

/* Build a variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.

   Arguments from the DECL_ARGUMENTS list can't be removed now, since they
   are linked by TREE_CHAIN directly.  The caller is responsible for
   eliminating them when they are being duplicated
   (i.e. copy_arguments_for_versioning).  */

static tree
build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
                               bool skip_return)
{
  tree new_decl = copy_node (orig_decl);
  tree new_type;

  new_type = TREE_TYPE (orig_decl);
  if (prototype_p (new_type)
      || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
    new_type
      = cgraph_build_function_type_skip_args (new_type, args_to_skip,
                                              skip_return);
  TREE_TYPE (new_decl) = new_type;

  /* For declarations setting DECL_VINDEX (i.e. methods)
     we expect the first argument to be the THIS pointer.  */
  if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
    DECL_VINDEX (new_decl) = NULL_TREE;

  /* When the signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (new_decl)
      && args_to_skip
      && !bitmap_empty_p (args_to_skip))
    {
      DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
    }
  /* The FE might have information and assumptions about the other
     arguments.  */
  DECL_LANG_SPECIFIC (new_decl) = NULL;
  return new_decl;
}

/* Set flags of NEW_NODE and its decl.  NEW_NODE is a newly created private
   clone or its thunk.  */

static void
set_new_clone_decl_and_node_flags (cgraph_node *new_node)
{
  DECL_EXTERNAL (new_node->decl) = 0;
  TREE_PUBLIC (new_node->decl) = 0;
  DECL_COMDAT (new_node->decl) = 0;
  DECL_WEAK (new_node->decl) = 0;
  DECL_VIRTUAL_P (new_node->decl) = 0;
  DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;

  new_node->externally_visible = 0;
  new_node->local.local = 1;
  new_node->lowered = true;
}

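/* Illustrative example (not part of GCC) of the argument-skipping helpers
   above.  Given a declaration for

     int foo (int a, double b, char *c);

   a bitmap ARGS_TO_SKIP with only bit 1 set (parameters are numbered from
   zero) yields a variant declared as

     int foo (int a, char *c);

   and passing SKIP_RETURN == true additionally turns the return type into
   void.  For a METHOD_TYPE whose implicit THIS argument (bit 0) is skipped,
   a plain FUNCTION_TYPE is built instead, as the comment above explains.  */
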
/* Duplicate thunk THUNK if necessary but make it refer to NODE.
   ARGS_TO_SKIP, if non-NULL, determines which parameters should be omitted.
   The function can return NODE itself if no thunk is necessary, which can
   happen when the thunk is this_adjusting but the this parameter is being
   removed.  */

static cgraph_node *
duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
{
  cgraph_node *new_thunk, *thunk_of;
  thunk_of = thunk->callees->callee->ultimate_alias_target ();

  if (thunk_of->thunk.thunk_p)
    node = duplicate_thunk_for_node (thunk_of, node);

  if (!DECL_ARGUMENTS (thunk->decl))
    thunk->get_untransformed_body ();

  cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p
        && cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
        && cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
        && cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p
        && cs->caller->thunk.virtual_value == thunk->thunk.virtual_value)
      return cs->caller;

  tree new_decl;
  if (!node->clone.args_to_skip)
    new_decl = copy_node (thunk->decl);
  else
    {
      /* We do not need to duplicate this_adjusting thunks if we have removed
         this.  */
      if (thunk->thunk.this_adjusting
          && bitmap_bit_p (node->clone.args_to_skip, 0))
        return node;

      new_decl = build_function_decl_skip_args (thunk->decl,
                                                node->clone.args_to_skip,
                                                false);
    }

  tree *link = &DECL_ARGUMENTS (new_decl);
  int i = 0;
  for (tree pd = DECL_ARGUMENTS (thunk->decl); pd; pd = DECL_CHAIN (pd), i++)
    {
      if (!node->clone.args_to_skip
          || !bitmap_bit_p (node->clone.args_to_skip, i))
        {
          tree nd = copy_node (pd);
          DECL_CONTEXT (nd) = new_decl;
          *link = nd;
          link = &DECL_CHAIN (nd);
        }
    }
  *link = NULL_TREE;

  gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
  gcc_checking_assert (!DECL_INITIAL (new_decl));
  gcc_checking_assert (!DECL_RESULT (new_decl));
  gcc_checking_assert (!DECL_RTL_SET_P (new_decl));

  DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));

  new_thunk = cgraph_node::create (new_decl);
  set_new_clone_decl_and_node_flags (new_thunk);
  new_thunk->definition = true;
  new_thunk->local.can_change_signature = node->local.can_change_signature;
  new_thunk->thunk = thunk->thunk;
  new_thunk->unique_name = in_lto_p;
  new_thunk->former_clone_of = thunk->decl;
  new_thunk->clone.args_to_skip = node->clone.args_to_skip;
  new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;

  cgraph_edge *e = new_thunk->create_edge (node, NULL, 0,
                                           CGRAPH_FREQ_BASE);
  symtab->call_edge_duplication_hooks (thunk->callees, e);
  symtab->call_cgraph_duplication_hooks (thunk, new_thunk);
  return new_thunk;
}

/* If E does not lead to a thunk, simply redirect it to N.  Otherwise create
   one or more equivalent thunks for N and redirect E to the first in the
   chain.  Note that it is then necessary to call
   n->expand_all_artificial_thunks once all callers are redirected.  */

void
cgraph_edge::redirect_callee_duplicating_thunks (cgraph_node *n)
{
  cgraph_node *orig_to = callee->ultimate_alias_target ();
  if (orig_to->thunk.thunk_p)
    n = duplicate_thunk_for_node (orig_to, n);

  redirect_callee (n);
}

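/* Usage sketch (illustrative only): the typical pattern for
   redirect_callee_duplicating_thunks above and expand_all_artificial_thunks
   below, when redirecting a hypothetical set of caller edges CALLERS to a
   new clone NEW_NODE, is

     cgraph_edge *e;
     unsigned i;
     FOR_EACH_VEC_ELT (callers, i, e)
       e->redirect_callee_duplicating_thunks (new_node);
     new_node->expand_all_artificial_thunks ();

   i.e. all callers are redirected first and the duplicated artificial
   thunks are expanded once at the end; cgraph_node::create_clone below
   follows essentially this pattern.  */
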
/* Call expand_thunk on all callers that are thunks and analyze those
   nodes that were expanded.  */

void
cgraph_node::expand_all_artificial_thunks ()
{
  cgraph_edge *e;
  for (e = callers; e;)
    if (e->caller->thunk.thunk_p)
      {
        cgraph_node *thunk = e->caller;

        e = e->next_caller;
        if (thunk->expand_thunk (false, false))
          {
            thunk->thunk.thunk_p = false;
            thunk->analyze ();
          }
        thunk->expand_all_artificial_thunks ();
      }
    else
      e = e->next_caller;
}

/* Record the transformation from ORIGINAL to its CLONE created with SUFFIX
   in the ipa-clones dump file, if it is enabled, and remember both nodes as
   cloned.  */

void
dump_callgraph_transformation (const cgraph_node *original,
                               const cgraph_node *clone,
                               const char *suffix)
{
  if (symtab->ipa_clones_dump_file)
    {
      fprintf (symtab->ipa_clones_dump_file,
               "Callgraph clone;%s;%d;%s;%d;%d;%s;%d;%s;%d;%d;%s\n",
               original->asm_name (), original->order,
               DECL_SOURCE_FILE (original->decl),
               DECL_SOURCE_LINE (original->decl),
               DECL_SOURCE_COLUMN (original->decl), clone->asm_name (),
               clone->order, DECL_SOURCE_FILE (clone->decl),
               DECL_SOURCE_LINE (clone->decl), DECL_SOURCE_COLUMN (clone->decl),
               suffix);

      symtab->cloned_nodes.add (original);
      symtab->cloned_nodes.add (clone);
    }
}

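/* For illustration (the concrete values are made up), a record written by
   dump_callgraph_transformation above looks like

     Callgraph clone;foo;12;a.c;10;5;foo.constprop.0;34;a.c;10;5;constprop

   i.e. semicolon-separated fields: the original's assembler name, order and
   source location, then the same for the clone, followed by the suffix
   describing the kind of clone.  */
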
/* Create a node representing a clone of this node executed GCOV_COUNT
   times.  Decrease the execution counts of the original node accordingly.
   The new clone will have its decl set to NEW_DECL, which may or may not be
   the same as the decl of this node.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of the execution is
   handled by the clone.
   When CALL_DUPLICATION_HOOK is true, the IPA pass duplication hooks are
   called for the new clone.  Otherwise the caller is responsible for doing
   so later.

   If the new node is being inlined into another one, NEW_INLINED_TO should
   be the out-of-line function the new one is (even indirectly) inlined to.
   All hooks will see this in the node's global.inlined_to when invoked.
   It can be NULL if the node is not inlined.  */

cgraph_node *
cgraph_node::create_clone (tree new_decl, gcov_type gcov_count, int freq,
                           bool update_original,
                           vec<cgraph_edge *> redirect_callers,
                           bool call_duplication_hook,
                           cgraph_node *new_inlined_to,
                           bitmap args_to_skip, const char *suffix)
{
  cgraph_node *new_node = symtab->create_empty ();
  cgraph_edge *e;
  gcov_type count_scale;
  unsigned i;

  if (new_inlined_to)
    dump_callgraph_transformation (this, new_inlined_to, "inlining to");

  new_node->decl = new_decl;
  new_node->register_symbol ();
  new_node->origin = origin;
  new_node->lto_file_data = lto_file_data;
  if (new_node->origin)
    {
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
    }
  new_node->analyzed = analyzed;
  new_node->definition = definition;
  new_node->local = local;
  new_node->externally_visible = false;
  new_node->no_reorder = no_reorder;
  new_node->local.local = true;
  new_node->global = global;
  new_node->global.inlined_to = new_inlined_to;
  new_node->rtl = rtl;
  new_node->count = count;
  new_node->frequency = frequency;
  new_node->tp_first_run = tp_first_run;
  new_node->tm_clone = tm_clone;
  new_node->icf_merged = icf_merged;
  new_node->merged_comdat = merged_comdat;
  new_node->thunk = thunk;

  new_node->clone.tree_map = NULL;
  new_node->clone.args_to_skip = args_to_skip;
  new_node->split_part = split_part;
  if (!args_to_skip)
    new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
  else if (clone.combined_args_to_skip)
    {
      new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
      bitmap_ior (new_node->clone.combined_args_to_skip,
                  clone.combined_args_to_skip, args_to_skip);
    }
  else
    new_node->clone.combined_args_to_skip = args_to_skip;

  if (count)
    {
      if (new_node->count > count)
        count_scale = REG_BR_PROB_BASE;
      else
        count_scale = GCOV_COMPUTE_SCALE (new_node->count, count);
    }
  else
    count_scale = 0;
  if (update_original)
    {
      count -= gcov_count;
      if (count < 0)
        count = 0;
    }

  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  The only exception is when the edge was proved to
         be unreachable during the cloning procedure.  */
      if (!e->callee
          || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
          || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
        e->redirect_callee_duplicating_thunks (new_node);
    }
  new_node->expand_all_artificial_thunks ();

  for (e = callees; e; e = e->next_callee)
    e->clone (new_node, e->call_stmt, e->lto_stmt_uid, count_scale,
              freq, update_original);

  for (e = indirect_calls; e; e = e->next_callee)
    e->clone (new_node, e->call_stmt, e->lto_stmt_uid,
              count_scale, freq, update_original);
  new_node->clone_references (this);

  new_node->next_sibling_clone = clones;
  if (clones)
    clones->prev_sibling_clone = new_node;
  clones = new_node;
  new_node->clone_of = this;

  if (call_duplication_hook)
    symtab->call_cgraph_duplication_hooks (this, new_node);

  if (!new_inlined_to)
    dump_callgraph_transformation (this, new_node, suffix);

  return new_node;
}

/* Counter used to make clone assembler names unique.  */

static GTY(()) unsigned int clone_fn_id_num;

/* Return a new assembler name for a clone with SUFFIX of a decl named
   NAME.  */

tree
clone_function_name_1 (const char *name, const char *suffix)
{
  size_t len = strlen (name);
  char *tmp_name, *prefix;

  prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (prefix, name, len);
  strcpy (prefix + len + 1, suffix);
  prefix[len] = symbol_table::symbol_suffix_separator ();
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
  return get_identifier (tmp_name);
}

/* Return a new assembler name for a clone of DECL with SUFFIX.  */

tree
clone_function_name (tree decl, const char *suffix)
{
  tree name = DECL_ASSEMBLER_NAME (decl);
  return clone_function_name_1 (IDENTIFIER_POINTER (name), suffix);
}

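/* For illustration (the exact form is target dependent, via
   ASM_FORMAT_PRIVATE_NAME and symbol_table::symbol_suffix_separator): on a
   typical ELF target, clone_function_name (decl_of_foo, "constprop")
   produces an identifier along the lines of

     foo.constprop.3

   where the trailing number comes from clone_fn_id_num and makes the name
   unique within the compilation.  */
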
/* Create a callgraph node clone with a new declaration.  The actual body
   will be copied later, at the compilation stage.

   TODO: after merging in ipa-sra use function call notes instead of the
   args_to_skip bitmap interface.  */

cgraph_node *
cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
                                   vec<ipa_replace_map *, va_gc> *tree_map,
                                   bitmap args_to_skip, const char *suffix)
{
  tree old_decl = decl;
  cgraph_node *new_node = NULL;
  tree new_decl;
  size_t len, i;
  ipa_replace_map *map;
  char *name;

  gcc_checking_assert (local.versionable);
  gcc_assert (local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);

  /* These pointers represent the function body and will be populated only
     when the clone is materialized.  */
  gcc_assert (new_decl != old_decl);
  DECL_STRUCT_FUNCTION (new_decl) = NULL;
  DECL_ARGUMENTS (new_decl) = NULL;
  DECL_INITIAL (new_decl) = NULL;
  DECL_RESULT (new_decl) = NULL;
  /* We can not do DECL_RESULT (new_decl) = NULL; here because of LTO
     partitioning sometimes storing only the clone decl instead of the
     original.  */

  /* Generate a new name for the new version.  */
  len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
  name = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
  strcpy (name + len + 1, suffix);
  name[len] = '.';
  DECL_NAME (new_decl) = get_identifier (name);
  SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
  SET_DECL_RTL (new_decl, NULL);

  new_node = create_clone (new_decl, count, CGRAPH_FREQ_BASE, false,
                           redirect_callers, false, NULL, args_to_skip, suffix);

  /* Update the properties.
     Make the clone visible only within this translation unit.  Make sure
     that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  set_new_clone_decl_and_node_flags (new_node);
  new_node->clone.tree_map = tree_map;
  if (!implicit_section)
    new_node->set_section (get_section ());

  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_node->unique_name = true;
  FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
    new_node->maybe_create_reference (map->new_tree, NULL);

  if (ipa_transforms_to_apply.exists ())
    new_node->ipa_transforms_to_apply
      = ipa_transforms_to_apply.copy ();

  symtab->call_cgraph_duplication_hooks (this, new_node);

  return new_node;
}

/* A callgraph node is being removed from the symbol table; see if its entry
   can be replaced by another inline clone.  */

cgraph_node *
cgraph_node::find_replacement (void)
{
  cgraph_node *next_inline_clone, *replacement;

  for (next_inline_clone = clones;
       next_inline_clone
       && next_inline_clone->decl != decl;
       next_inline_clone = next_inline_clone->next_sibling_clone)
    ;

  /* If there is an inline clone of the node being removed, we need
     to put it into the position of the removed node and reorganize all
     other clones to be based on it.  */
  if (next_inline_clone)
    {
      cgraph_node *n;
      cgraph_node *new_clones;

      replacement = next_inline_clone;

      /* Unlink the inline clone from the list of clones of the removed
         node.  */
      if (next_inline_clone->next_sibling_clone)
        next_inline_clone->next_sibling_clone->prev_sibling_clone
          = next_inline_clone->prev_sibling_clone;
      if (next_inline_clone->prev_sibling_clone)
        {
          gcc_assert (clones != next_inline_clone);
          next_inline_clone->prev_sibling_clone->next_sibling_clone
            = next_inline_clone->next_sibling_clone;
        }
      else
        {
          gcc_assert (clones == next_inline_clone);
          clones = next_inline_clone->next_sibling_clone;
        }

      new_clones = clones;
      clones = NULL;

      /* Copy clone info.  */
      next_inline_clone->clone = clone;

      /* Now place it into the clone tree at the same level as NODE.  */
      next_inline_clone->clone_of = clone_of;
      next_inline_clone->prev_sibling_clone = NULL;
      next_inline_clone->next_sibling_clone = NULL;
      if (clone_of)
        {
          if (clone_of->clones)
            clone_of->clones->prev_sibling_clone = next_inline_clone;
          next_inline_clone->next_sibling_clone = clone_of->clones;
          clone_of->clones = next_inline_clone;
        }

      /* Merge the clone list.  */
      if (new_clones)
        {
          if (!next_inline_clone->clones)
            next_inline_clone->clones = new_clones;
          else
            {
              n = next_inline_clone->clones;
              while (n->next_sibling_clone)
                n = n->next_sibling_clone;
              n->next_sibling_clone = new_clones;
              new_clones->prev_sibling_clone = n;
            }
        }

      /* Update clone_of pointers.  */
      n = new_clones;
      while (n)
        {
          n->clone_of = next_inline_clone;
          n = n->next_sibling_clone;
        }
      return replacement;
    }
  else
    return NULL;
}

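/* Notes on the clone tree manipulated above and walked by the functions
   below (illustrative; the layout is implied by the fields in cgraph.h):
   every node points to its master via clone_of and to its first clone via
   clones, and clones of the same master form a doubly linked list through
   next_sibling_clone / prev_sibling_clone:

       master
         |  clone_of ^
         v clones    |
       clone1 <-> clone2 <-> clone3      (next/prev_sibling_clone)
         |
       clone1.1 ...

   The "while (node != this ...)" loops in set_call_stmt_including_clones
   and create_edge_including_clones below perform a pre-order walk of this
   tree: descend into node->clones first, then advance to
   next_sibling_clone, and climb back up through clone_of when a subtree is
   exhausted.  */
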
/* Like cgraph_edge::set_call_stmt, but walk the clone tree and update all
   clones sharing the same function body.
   When UPDATE_SPECULATIVE is true, all three components of a speculative
   edge get updated.  Otherwise we update only the direct call.  */

void
cgraph_node::set_call_stmt_including_clones (gimple *old_stmt,
                                             gcall *new_stmt,
                                             bool update_speculative)
{
  cgraph_node *node;
  cgraph_edge *edge = get_edge (old_stmt);

  if (edge)
    edge->set_call_stmt (new_stmt, update_speculative);

  node = clones;
  if (node)
    while (node != this)
      {
        cgraph_edge *edge = node->get_edge (old_stmt);
        if (edge)
          {
            edge->set_call_stmt (new_stmt, update_speculative);
            /* If UPDATE_SPECULATIVE is false, it means that we are turning
               a speculative call into a real code sequence.  Update the
               callgraph edges.  */
            if (edge->speculative && !update_speculative)
              {
                cgraph_edge *direct, *indirect;
                ipa_ref *ref;

                gcc_assert (!edge->indirect_unknown_callee);
                edge->speculative_call_info (direct, indirect, ref);
                direct->speculative = false;
                indirect->speculative = false;
                ref->speculative = false;
              }
          }
        if (node->clones)
          node = node->clones;
        else if (node->next_sibling_clone)
          node = node->next_sibling_clone;
        else
          {
            while (node != this && !node->next_sibling_clone)
              node = node->clone_of;
            if (node != this)
              node = node->next_sibling_clone;
          }
      }
}

/* Like cgraph_node::create_edge, but walk the clone tree and update all
   clones sharing the same function body.  If clones already have an edge
   for OLD_STMT, only update the edge, the same way
   set_call_stmt_including_clones does.

   TODO: COUNT and LOOP_DEPTH should be properly distributed based on the
   relative frequencies of the clones.  */

void
cgraph_node::create_edge_including_clones (cgraph_node *callee,
                                           gimple *old_stmt, gcall *stmt,
                                           gcov_type count,
                                           int freq,
                                           cgraph_inline_failed_t reason)
{
  cgraph_node *node;
  cgraph_edge *edge;

  if (!get_edge (stmt))
    {
      edge = create_edge (callee, stmt, count, freq);
      edge->inline_failed = reason;
    }

  node = clones;
  if (node)
    while (node != this)
      /* Thunk clones do not get updated while copying inline function
         body.  */
      if (!node->thunk.thunk_p)
        {
          cgraph_edge *edge = node->get_edge (old_stmt);

          /* It is possible that clones already contain the edge while the
             master didn't.  Either we promoted an indirect call into a
             direct call in the clone or we are processing clones of an
             unreachable master where edges have been removed.  */
          if (edge)
            edge->set_call_stmt (stmt);
          else if (! node->get_edge (stmt))
            {
              edge = node->create_edge (callee, stmt, count, freq);
              edge->inline_failed = reason;
            }

          if (node->clones)
            node = node->clones;
          else if (node->next_sibling_clone)
            node = node->next_sibling_clone;
          else
            {
              while (node != this && !node->next_sibling_clone)
                node = node->clone_of;
              if (node != this)
                node = node->next_sibling_clone;
            }
        }
}

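/* Background note for the speculative-call handling in
   set_call_stmt_including_clones above (summarizing the representation used
   by the callgraph): a speculative call consists of three components, a
   direct edge to the guessed callee, the original indirect edge, and an IPA
   reference; edge->speculative_call_info (direct, indirect, ref) retrieves
   all three.  Clearing the speculative flag on all of them, as done above
   when UPDATE_SPECULATIVE is false, records that the speculation has been
   expanded into real code.  */
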
/* Remove the node from the cgraph together with all inline clones inlined
   into it.  However, skip removal of FORBIDDEN_NODE and return true if it
   needs to be removed.  This allows the function to be called from an outer
   loop walking the clone tree.  */

bool
cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
{
  cgraph_edge *e, *next;
  bool found = false;

  if (this == forbidden_node)
    {
      callers->remove ();
      return true;
    }
  for (e = callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
        found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
    }
  remove ();
  return found;
}

/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (); now the call_expr in their
   respective tree code should be updated to call NEW_VERSION.  */

static void
update_call_expr (cgraph_node *new_version)
{
  cgraph_edge *e;

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
      function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}


/* Create a new cgraph node which is the new version of this callgraph
   node.  REDIRECT_CALLERS holds the caller edges which should be redirected
   to point to NEW_VERSION.  All the callee edges of the node are cloned to
   the new version node.  Return the new version node.

   If non-NULL, BBS_TO_COPY determines which basic blocks were copied, to
   prevent duplication of calls that are dead in the clone.  */

cgraph_node *
cgraph_node::create_version_clone (tree new_decl,
                                   vec<cgraph_edge *> redirect_callers,
                                   bitmap bbs_to_copy,
                                   const char *suffix)
{
  cgraph_node *new_version;
  cgraph_edge *e;
  unsigned i;

  new_version = cgraph_node::create (new_decl);

  new_version->analyzed = analyzed;
  new_version->definition = definition;
  new_version->local = local;
  new_version->externally_visible = false;
  new_version->no_reorder = no_reorder;
  new_version->local.local = new_version->definition;
  new_version->global = global;
  new_version->rtl = rtl;
  new_version->count = count;

  for (e = callees; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      e->clone (new_version, e->call_stmt,
                e->lto_stmt_uid, REG_BR_PROB_BASE,
                CGRAPH_FREQ_BASE,
                true);
  for (e = indirect_calls; e; e = e->next_callee)
    if (!bbs_to_copy
        || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      e->clone (new_version, e->call_stmt,
                e->lto_stmt_uid, REG_BR_PROB_BASE,
                CGRAPH_FREQ_BASE,
                true);
  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
         version.  */
      e->redirect_callee (new_version);
    }

  symtab->call_cgraph_duplication_hooks (this, new_version);

  dump_callgraph_transformation (this, new_version, suffix);

  return new_version;
}

/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS holds the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to the results of prior analysis).

   If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
   from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.

   Return the new version's cgraph node.  */

cgraph_node *
cgraph_node::create_version_clone_with_body
  (vec<cgraph_edge *> redirect_callers,
   vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
   bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
   const char *suffix)
{
  tree old_decl = decl;
  cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  gcc_assert (local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the new version.  */
  if (!args_to_skip && !skip_return)
    new_decl = copy_node (old_decl);
  else
    new_decl
      = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  /* When the old decl was a con-/destructor make sure the clone isn't.  */
  DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_decl) = 0;

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node = create_version_clone (new_decl, redirect_callers,
                                           bbs_to_copy, suffix);

  if (ipa_transforms_to_apply.exists ())
    new_version_node->ipa_transforms_to_apply
      = ipa_transforms_to_apply.copy ();
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
                            skip_return, bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  new_version_node->make_decl_local ();
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
  if (!implicit_section)
    new_version_node->set_section (get_section ());
  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_version_node->unique_name = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  symtab->call_cgraph_insertion_hooks (this);
  return new_version_node;
}

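/* Usage sketch for create_version_clone_with_body above (illustrative; the
   node, the caller set and the "specialized" suffix are hypothetical).  To
   create a real-bodied version of NODE that drops its second parameter
   (args_to_skip bit 1) and its return value (skip_return == true),
   redirecting the collected callers to it:

     bitmap args_to_skip = BITMAP_GGC_ALLOC ();
     bitmap_set_bit (args_to_skip, 1);
     cgraph_node *new_version
       = node->create_version_clone_with_body (node->collect_callers (),
                                               NULL, args_to_skip,
                                               true, NULL, NULL,
                                               "specialized");

   The NULL arguments are TREE_MAP, BBS_TO_COPY and NEW_ENTRY_BLOCK, meaning
   no replacements and a full-body copy.  */
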
/* Given a virtual clone, turn it into an actual clone.  */

static void
cgraph_materialize_clone (cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
                            node->clone.tree_map, true,
                            node->clone.args_to_skip, false,
                            NULL, NULL);
  if (symtab->dump_file)
    {
      dump_function_to_file (node->clone_of->decl, symtab->dump_file,
                             dump_flags);
      dump_function_to_file (node->decl, symtab->dump_file, dump_flags);
    }

  /* The function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      node->clone_of->release_body ();
      node->clone_of->remove_callees ();
      node->clone_of->remove_all_references ();
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}

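/* For orientation (names and values are made up, and the exact formatting
   of bitmaps and expressions depends on the dump routines), the dump output
   produced by symbol_table::materialize_all_clones below looks roughly like

     Materializing clones
     cloning foo to foo.constprop.0
        replace map: b -> 42(replace);
        args_to_skip: 1
        combined_args_to_skip: 1
     Materialization and call site updates done.

   optionally interleaved with dumps of the original and materialized
   function bodies.  */
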
"(ref)":""); 1127 } 1128 fprintf (symtab->dump_file, "\n"); 1129 } 1130 if (node->clone.args_to_skip) 1131 { 1132 fprintf (symtab->dump_file, " args_to_skip: "); 1133 dump_bitmap (symtab->dump_file, 1134 node->clone.args_to_skip); 1135 } 1136 if (node->clone.args_to_skip) 1137 { 1138 fprintf (symtab->dump_file, " combined_args_to_skip:"); 1139 dump_bitmap (symtab->dump_file, node->clone.combined_args_to_skip); 1140 } 1141 } 1142 cgraph_materialize_clone (node); 1143 stabilized = false; 1144 } 1145 } 1146 } 1147 } 1148 FOR_EACH_FUNCTION (node) 1149 if (!node->analyzed && node->callees) 1150 { 1151 node->remove_callees (); 1152 node->remove_all_references (); 1153 } 1154 else 1155 node->clear_stmts_in_references (); 1156 if (symtab->dump_file) 1157 fprintf (symtab->dump_file, "Materialization Call site updates done.\n"); 1158 1159 cgraph_node::checking_verify_cgraph_nodes (); 1160 1161 symtab->remove_unreachable_nodes (symtab->dump_file); 1162 } 1163 1164 #include "gt-cgraphclones.h" 1165