/* Callgraph clones
   Copyright (C) 2003-2019 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module provides facilities for cloning functions, i.e. creating
   new functions based on existing functions with simple modifications,
   such as replacement of parameters.

   To allow whole program optimization without actual presence of function
   bodies, an additional infrastructure is provided for so-called virtual
   clones.

   A virtual clone in the callgraph is a function that has no
   associated body, just a description of how to create its body based
   on a different function (which itself may be a virtual clone).

   The description of function modifications includes adjustments to
   the function's signature (which allows, for example, removing or
   adding function arguments), substitutions to perform on the
   function body, and, for inlined functions, a pointer to the
   function that it will be inlined into.

   It is also possible to redirect any edge of the callgraph from a
   function to its virtual clone.  This implies updating of the call
   site to adjust for the new function signature.

   Most of the transformations performed by inter-procedural
   optimizations can be represented via virtual clones.  For
   instance, a constant propagation pass can produce a virtual clone
   of a function which replaces one of its arguments by a
   constant.  The inliner can represent its decisions by producing a
   clone of a function whose body will later be integrated into
   a given function.

   Using virtual clones, the program can be easily updated
   during the Execute stage, solving most of the pass interaction
   problems that would otherwise occur during Transform.

   Virtual clones are later materialized in the LTRANS stage and
   turned into real functions.  Passes executed after the virtual
   clone was introduced also perform their Transform stage
   on the new functions, so for a pass there is no significant
   difference between operating on a real function or a virtual
   clone introduced before its Execute stage.

   Optimization passes then work on virtual clones introduced before
   their Execute stage as if they were real functions.  The
   only difference is that clones are not visible during the
   Generate Summary stage.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "stringpool.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "tree-eh.h"
#include "tree-cfg.h"
#include "tree-inline.h"
#include "dumpfile.h"
#include "gimple-pretty-print.h"
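/* Illustrative sketch (not part of GCC; kept under "#if 0" so it is never
   compiled): how an IPA pass might use the interfaces below to request a
   virtual clone of NODE in which the constant VALUE is substituted for the
   parameter PARM.  The helper name and the way PARM/VALUE are obtained are
   hypothetical; collect_callers and create_virtual_clone are the real
   interfaces declared in cgraph.h and defined further down in this file.  */
#if 0
static cgraph_node *
example_create_constprop_clone (cgraph_node *node, tree parm, tree value)
{
  /* Describe the substitution of VALUE for PARM in the clone's body,
     using the ipa_replace_map fields referenced elsewhere in this file.  */
  ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
  map->old_tree = parm;
  map->new_tree = value;
  map->replace_p = true;
  map->ref_p = false;

  vec<ipa_replace_map *, va_gc> *tree_map = NULL;
  vec_safe_push (tree_map, map);

  /* Redirect every known caller to the new virtual clone; the clone's body
     is only produced later, when the clone is materialized.  */
  vec<cgraph_edge *> callers = node->collect_callers ();
  cgraph_node *clone
    = node->create_virtual_clone (callers, tree_map, NULL,
				  "constprop_example", 0);
  callers.release ();
  return clone;
}
#endif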
/* Create a clone of this edge, placed in node N and associated with call
   statement CALL_STMT (the LTO statement uid STMT_UID is used when no
   statement is available).  The count of the new edge is this edge's count
   scaled by NUM/DEN; when UPDATE_ORIGINAL is true, the IPA count of the
   original edge is decreased accordingly.  */

cgraph_edge *
cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
		    profile_count num, profile_count den,
		    bool update_original)
{
  cgraph_edge *new_edge;
  profile_count::adjust_for_ipa_scaling (&num, &den);
  profile_count prof_count = count.apply_scale (num, den);

  if (indirect_unknown_callee)
    {
      tree decl;

      if (call_stmt && (decl = gimple_call_fndecl (call_stmt))
	  /* When the call is speculative, we need to resolve it
	     via cgraph_resolve_speculation and not here.  */
	  && !speculative)
	{
	  cgraph_node *callee = cgraph_node::get (decl);
	  gcc_checking_assert (callee);
	  new_edge = n->create_edge (callee, call_stmt, prof_count);
	}
      else
	{
	  new_edge = n->create_indirect_edge (call_stmt,
					      indirect_info->ecf_flags,
					      prof_count, false);
	  *new_edge->indirect_info = *indirect_info;
	}
    }
  else
    {
      new_edge = n->create_edge (callee, call_stmt, prof_count);
      if (indirect_info)
	{
	  new_edge->indirect_info
	    = ggc_cleared_alloc<cgraph_indirect_call_info> ();
	  *new_edge->indirect_info = *indirect_info;
	}
    }

  new_edge->inline_failed = inline_failed;
  new_edge->indirect_inlining_edge = indirect_inlining_edge;
  new_edge->lto_stmt_uid = stmt_uid;
  /* Clone flags that depend on call_stmt availability manually.  */
  new_edge->can_throw_external = can_throw_external;
  new_edge->call_stmt_cannot_inline_p = call_stmt_cannot_inline_p;
  new_edge->speculative = speculative;
  new_edge->in_polymorphic_cdtor = in_polymorphic_cdtor;

  /* Update IPA profile.  Local profiles need no updating in original.  */
  if (update_original)
    count = count.combine_with_ipa_count (count.ipa ()
					  - new_edge->count.ipa ());
  symtab->call_edge_duplication_hooks (this, new_edge);
  return new_edge;
}

/* Build variant of function type ORIG_TYPE skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.  */

tree
cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
				      bool skip_return)
{
  tree new_type = NULL;
  tree args, new_args = NULL;
  tree new_reversed;
  int i = 0;

  for (args = TYPE_ARG_TYPES (orig_type); args && args != void_list_node;
       args = TREE_CHAIN (args), i++)
    if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
      new_args = tree_cons (NULL_TREE, TREE_VALUE (args), new_args);

  new_reversed = nreverse (new_args);
  if (args)
    {
      if (new_reversed)
	TREE_CHAIN (new_args) = void_list_node;
      else
	new_reversed = void_list_node;
    }

  /* Use copy_node to preserve as much as possible from the original type
     (debug info, attribute lists etc.).
     The exception is that METHOD_TYPEs must have a THIS argument;
     when we are asked to remove it, we need to build a new FUNCTION_TYPE
     instead.  */
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || !args_to_skip
      || !bitmap_bit_p (args_to_skip, 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
    }

  if (skip_return)
    TREE_TYPE (new_type) = void_type_node;

  return new_type;
}
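/* For example (illustrative only; follows directly from the code above):
   given ORIG_TYPE = int (int, double, char), ARGS_TO_SKIP = {1} and
   SKIP_RETURN = true, the result is a distinct type copy equivalent to
   void (int, char).  For a METHOD_TYPE whose THIS argument (index 0) is
   skipped, a plain FUNCTION_TYPE is built instead.  */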
/* Build variant of function decl ORIG_DECL skipping ARGS_TO_SKIP and the
   return value if SKIP_RETURN is true.

   Arguments from the DECL_ARGUMENTS list can't be removed now, since they
   are linked by TREE_CHAIN directly.  The caller is responsible for
   eliminating them when they are being duplicated
   (i.e. copy_arguments_for_versioning).  */

static tree
build_function_decl_skip_args (tree orig_decl, bitmap args_to_skip,
			       bool skip_return)
{
  tree new_decl = copy_node (orig_decl);
  tree new_type;

  new_type = TREE_TYPE (orig_decl);
  if (prototype_p (new_type)
      || (skip_return && !VOID_TYPE_P (TREE_TYPE (new_type))))
    new_type
      = cgraph_build_function_type_skip_args (new_type, args_to_skip,
					      skip_return);
  TREE_TYPE (new_decl) = new_type;

  /* For declarations setting DECL_VINDEX (i.e. methods)
     we expect the first argument to be the THIS pointer.  */
  if (args_to_skip && bitmap_bit_p (args_to_skip, 0))
    DECL_VINDEX (new_decl) = NULL_TREE;

  /* When the signature changes, we need to clear builtin info.  */
  if (fndecl_built_in_p (new_decl)
      && args_to_skip
      && !bitmap_empty_p (args_to_skip))
    {
      DECL_BUILT_IN_CLASS (new_decl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (new_decl) = (enum built_in_function) 0;
    }
  /* The FE might have information and assumptions about the other
     arguments.  */
  DECL_LANG_SPECIFIC (new_decl) = NULL;
  return new_decl;
}

/* Set flags of NEW_NODE and its decl.  NEW_NODE is a newly created private
   clone or its thunk.  */

static void
set_new_clone_decl_and_node_flags (cgraph_node *new_node)
{
  DECL_EXTERNAL (new_node->decl) = 0;
  TREE_PUBLIC (new_node->decl) = 0;
  DECL_COMDAT (new_node->decl) = 0;
  DECL_WEAK (new_node->decl) = 0;
  DECL_VIRTUAL_P (new_node->decl) = 0;
  DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;

  new_node->externally_visible = 0;
  new_node->local.local = 1;
  new_node->lowered = true;
}

/* Duplicate thunk THUNK if necessary but make it refer to NODE.
   ARGS_TO_SKIP, if non-NULL, determines which parameters should be omitted.
   The function can return NODE itself if no thunk is necessary, which can
   happen when the thunk is this_adjusting but the this parameter is being
   removed.  */

static cgraph_node *
duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
{
  cgraph_node *new_thunk, *thunk_of;
  thunk_of = thunk->callees->callee->ultimate_alias_target ();

  if (thunk_of->thunk.thunk_p)
    node = duplicate_thunk_for_node (thunk_of, node);

  if (!DECL_ARGUMENTS (thunk->decl))
    thunk->get_untransformed_body ();

  cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (cs->caller->thunk.thunk_p
	&& cs->caller->thunk.fixed_offset == thunk->thunk.fixed_offset
	&& cs->caller->thunk.virtual_value == thunk->thunk.virtual_value
	&& cs->caller->thunk.indirect_offset == thunk->thunk.indirect_offset
	&& cs->caller->thunk.this_adjusting == thunk->thunk.this_adjusting
	&& cs->caller->thunk.virtual_offset_p == thunk->thunk.virtual_offset_p)
      return cs->caller;

  tree new_decl;
  if (!node->clone.args_to_skip)
    new_decl = copy_node (thunk->decl);
  else
    {
      /* We do not need to duplicate this_adjusting thunks if we have removed
	 this.  */
      if (thunk->thunk.this_adjusting
	  && bitmap_bit_p (node->clone.args_to_skip, 0))
	return node;

      new_decl = build_function_decl_skip_args (thunk->decl,
						node->clone.args_to_skip,
						false);
    }

  tree *link = &DECL_ARGUMENTS (new_decl);
  int i = 0;
  for (tree pd = DECL_ARGUMENTS (thunk->decl); pd; pd = DECL_CHAIN (pd), i++)
    {
      if (!node->clone.args_to_skip
	  || !bitmap_bit_p (node->clone.args_to_skip, i))
	{
	  tree nd = copy_node (pd);
	  DECL_CONTEXT (nd) = new_decl;
	  *link = nd;
	  link = &DECL_CHAIN (nd);
	}
    }
  *link = NULL_TREE;

  gcc_checking_assert (!DECL_STRUCT_FUNCTION (new_decl));
  gcc_checking_assert (!DECL_INITIAL (new_decl));
  gcc_checking_assert (!DECL_RESULT (new_decl));
  gcc_checking_assert (!DECL_RTL_SET_P (new_decl));

  DECL_NAME (new_decl) = clone_function_name_numbered (thunk->decl,
						       "artificial_thunk");
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));

  /* We need to force DECL_IGNORED_P because the new thunk is created after
     early debug was run.  */
  DECL_IGNORED_P (new_decl) = 1;

  new_thunk = cgraph_node::create (new_decl);
  set_new_clone_decl_and_node_flags (new_thunk);
  new_thunk->definition = true;
  new_thunk->local.can_change_signature = node->local.can_change_signature;
  new_thunk->thunk = thunk->thunk;
  new_thunk->unique_name = in_lto_p;
  new_thunk->former_clone_of = thunk->decl;
  new_thunk->clone.args_to_skip = node->clone.args_to_skip;
  new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;

  cgraph_edge *e = new_thunk->create_edge (node, NULL, new_thunk->count);
  symtab->call_edge_duplication_hooks (thunk->callees, e);
  symtab->call_cgraph_duplication_hooks (thunk, new_thunk);
  return new_thunk;
}

/* If E does not lead to a thunk, simply redirect it to N.  Otherwise create
   one or more equivalent thunks for N and redirect E to the first in the
   chain.  Note that it is then necessary to call
   n->expand_all_artificial_thunks once all callers are redirected.  */

void
cgraph_edge::redirect_callee_duplicating_thunks (cgraph_node *n)
{
  cgraph_node *orig_to = callee->ultimate_alias_target ();
  if (orig_to->thunk.thunk_p)
    n = duplicate_thunk_for_node (orig_to, n);

  redirect_callee (n);
}
/* Call expand_thunk on all callers that are thunks and analyze those
   nodes that were expanded.  */

void
cgraph_node::expand_all_artificial_thunks ()
{
  cgraph_edge *e;
  for (e = callers; e;)
    if (e->caller->thunk.thunk_p)
      {
	cgraph_node *thunk = e->caller;

	e = e->next_caller;
	if (thunk->expand_thunk (false, false))
	  {
	    thunk->thunk.thunk_p = false;
	    thunk->analyze ();
	  }
	thunk->expand_all_artificial_thunks ();
      }
    else
      e = e->next_caller;
}

void
dump_callgraph_transformation (const cgraph_node *original,
			       const cgraph_node *clone,
			       const char *suffix)
{
  if (symtab->ipa_clones_dump_file)
    {
      fprintf (symtab->ipa_clones_dump_file,
	       "Callgraph clone;%s;%d;%s;%d;%d;%s;%d;%s;%d;%d;%s\n",
	       original->asm_name (), original->order,
	       DECL_SOURCE_FILE (original->decl),
	       DECL_SOURCE_LINE (original->decl),
	       DECL_SOURCE_COLUMN (original->decl), clone->asm_name (),
	       clone->order, DECL_SOURCE_FILE (clone->decl),
	       DECL_SOURCE_LINE (clone->decl), DECL_SOURCE_COLUMN (clone->decl),
	       suffix);

      symtab->cloned_nodes.add (original);
      symtab->cloned_nodes.add (clone);
    }
}
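/* An illustrative (made-up) line as emitted above into the ipa-clones dump
   file: the semicolon-separated fields are the original's assembler name,
   order, file, line and column, the same five fields for the clone, and
   finally the suffix describing the transformation, e.g.

     Callgraph clone;foo;12;test.c;3;5;foo.constprop.0;27;test.c;3;5;constprop  */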
/* Create node representing clone of N executed COUNT times.  Decrease
   the execution counts from the original node too.
   The new clone will have decl set to DECL, which may or may not be the same
   as the decl of N.

   When UPDATE_ORIGINAL is true, the counts are subtracted from the original
   function's profile to reflect the fact that part of execution is handled
   by the node.
   When CALL_DUPLICATION_HOOK is true, the ipa passes are notified of the
   new clone.  Otherwise the caller is responsible for doing so later.

   If the new node is being inlined into another one, NEW_INLINED_TO should be
   the outline function the new one is (even indirectly) inlined to.  All hooks
   will see this in the node's global.inlined_to when invoked.  It can be NULL
   if the node is not inlined.  */

cgraph_node *
cgraph_node::create_clone (tree new_decl, profile_count prof_count,
			   bool update_original,
			   vec<cgraph_edge *> redirect_callers,
			   bool call_duplication_hook,
			   cgraph_node *new_inlined_to,
			   bitmap args_to_skip, const char *suffix)
{
  cgraph_node *new_node = symtab->create_empty ();
  cgraph_edge *e;
  unsigned i;
  profile_count old_count = count;

  if (new_inlined_to)
    dump_callgraph_transformation (this, new_inlined_to, "inlining to");

  /* When inlining we scale precisely to prof_count, when cloning we can
     preserve the local profile.  */
  if (!new_inlined_to)
    prof_count = count.combine_with_ipa_count (prof_count);
  new_node->count = prof_count;

  /* Update IPA profile.  Local profiles need no updating in original.  */
  if (update_original)
    count = count.combine_with_ipa_count (count.ipa () - prof_count.ipa ());
  new_node->decl = new_decl;
  new_node->register_symbol ();
  new_node->origin = origin;
  new_node->lto_file_data = lto_file_data;
  if (new_node->origin)
    {
      new_node->next_nested = new_node->origin->nested;
      new_node->origin->nested = new_node;
    }
  new_node->analyzed = analyzed;
  new_node->definition = definition;
  new_node->local = local;
  new_node->externally_visible = false;
  new_node->no_reorder = no_reorder;
  new_node->local.local = true;
  new_node->global = global;
  new_node->global.inlined_to = new_inlined_to;
  new_node->rtl = rtl;
  new_node->frequency = frequency;
  new_node->tp_first_run = tp_first_run;
  new_node->tm_clone = tm_clone;
  new_node->icf_merged = icf_merged;
  new_node->merged_comdat = merged_comdat;
  new_node->thunk = thunk;

  new_node->clone.tree_map = NULL;
  new_node->clone.args_to_skip = args_to_skip;
  new_node->split_part = split_part;
  if (!args_to_skip)
    new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
  else if (clone.combined_args_to_skip)
    {
      new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
      bitmap_ior (new_node->clone.combined_args_to_skip,
		  clone.combined_args_to_skip, args_to_skip);
    }
  else
    new_node->clone.combined_args_to_skip = args_to_skip;

  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
	 version.  The only exception is when the edge was proved to
	 be unreachable during the cloning procedure.  */
      if (!e->callee
	  || !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
	e->redirect_callee_duplicating_thunks (new_node);
    }
  new_node->expand_all_artificial_thunks ();

  for (e = callees; e; e = e->next_callee)
    e->clone (new_node, e->call_stmt, e->lto_stmt_uid, new_node->count,
	      old_count, update_original);

  for (e = indirect_calls; e; e = e->next_callee)
    e->clone (new_node, e->call_stmt, e->lto_stmt_uid,
	      new_node->count, old_count, update_original);
  new_node->clone_references (this);

  new_node->next_sibling_clone = clones;
  if (clones)
    clones->prev_sibling_clone = new_node;
  clones = new_node;
  new_node->clone_of = this;

  if (call_duplication_hook)
    symtab->call_cgraph_duplication_hooks (this, new_node);

  if (!new_inlined_to)
    dump_callgraph_transformation (this, new_node, suffix);

  return new_node;
}

static GTY(()) hash_map<const char *, unsigned> *clone_fn_ids;

/* Return a new assembler name for a clone of decl named NAME.  Apart
   from the string SUFFIX, the new name will end with a unique (for
   each NAME) unspecified number.  If clone numbering is not needed
   then the two-argument clone_function_name should be used instead.
   Should not be called directly except by
   lto-partition.c:privatize_symbol_name_1.  */

tree
clone_function_name_numbered (const char *name, const char *suffix)
{
  /* Initialize the function->counter mapping the first time it's
     needed.  */
  if (!clone_fn_ids)
    clone_fn_ids = hash_map<const char *, unsigned int>::create_ggc (64);
  unsigned int &suffix_counter = clone_fn_ids->get_or_insert (
				   IDENTIFIER_POINTER (get_identifier (name)));
  return clone_function_name (name, suffix, suffix_counter++);
}

/* Return a new assembler name for a clone of DECL.  Apart from string
   SUFFIX, the new name will end with a unique (for each DECL
   assembler name) unspecified number.  If clone numbering is not
   needed then the two argument clone_function_name should be used
   instead.  */

tree
clone_function_name_numbered (tree decl, const char *suffix)
{
  tree name = DECL_ASSEMBLER_NAME (decl);
  return clone_function_name_numbered (IDENTIFIER_POINTER (name),
				       suffix);
}

/* Return a new assembler name for a clone of decl named NAME.  Apart
   from the string SUFFIX, the new name will end with the specified
   NUMBER.  If clone numbering is not needed then the two argument
   clone_function_name should be used instead.  */

tree
clone_function_name (const char *name, const char *suffix,
		     unsigned long number)
{
  size_t len = strlen (name);
  char *tmp_name, *prefix;

  prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (prefix, name, len);
  strcpy (prefix + len + 1, suffix);
  prefix[len] = symbol_table::symbol_suffix_separator ();
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, number);
  return get_identifier (tmp_name);
}

/* Return a new assembler name for a clone of DECL.  Apart from the
   string SUFFIX, the new name will end with the specified NUMBER.  If
   clone numbering is not needed then the two argument
   clone_function_name should be used instead.  */

tree
clone_function_name (tree decl, const char *suffix,
		     unsigned long number)
{
  return clone_function_name (
	   IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), suffix, number);
}

/* Return a new assembler name ending with the string SUFFIX for a
   clone of DECL.  */

tree
clone_function_name (tree decl, const char *suffix)
{
  tree identifier = DECL_ASSEMBLER_NAME (decl);
  /* For consistency this needs to behave the same way as
     ASM_FORMAT_PRIVATE_NAME does, but without the final number
     suffix.  */
  char *separator = XALLOCAVEC (char, 2);
  separator[0] = symbol_table::symbol_suffix_separator ();
  separator[1] = 0;
#if defined (NO_DOT_IN_LABEL) && defined (NO_DOLLAR_IN_LABEL)
  const char *prefix = "__";
#else
  const char *prefix = "";
#endif
  char *result = ACONCAT ((prefix,
			   IDENTIFIER_POINTER (identifier),
			   separator,
			   suffix,
			   (char*)0));
  return get_identifier (result);
}
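/* For illustration (not used by the compiler): on typical ELF targets, where
   symbol_suffix_separator () is '.', clone_function_name (decl, "constprop",
   3) for a function whose assembler name is "foo" yields "foo.constprop.3",
   and clone_function_name_numbered hands out the trailing number from the
   per-name counter kept in clone_fn_ids above.  Targets defining both
   NO_DOT_IN_LABEL and NO_DOLLAR_IN_LABEL instead produce a "__"-prefixed,
   underscore-separated form, following ASM_FORMAT_PRIVATE_NAME.  */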
/* Create callgraph node clone with new declaration.  The actual body will be
   copied later at compilation stage.  The name of the new clone will be
   constructed from the name of the original node, SUFFIX and NUM_SUFFIX.

   TODO: after merging in ipa-sra use function call notes instead of the
   args_to_skip bitmap interface.  */

cgraph_node *
cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
				   vec<ipa_replace_map *, va_gc> *tree_map,
				   bitmap args_to_skip, const char * suffix,
				   unsigned num_suffix)
{
  tree old_decl = decl;
  cgraph_node *new_node = NULL;
  tree new_decl;
  size_t len, i;
  ipa_replace_map *map;
  char *name;

  gcc_checking_assert (local.versionable);
  gcc_assert (local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node.  */
  if (!args_to_skip)
    new_decl = copy_node (old_decl);
  else
    new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);

  /* These pointers represent the function body and will be populated only
     when the clone is materialized.  */
  gcc_assert (new_decl != old_decl);
  DECL_STRUCT_FUNCTION (new_decl) = NULL;
  DECL_ARGUMENTS (new_decl) = NULL;
  DECL_INITIAL (new_decl) = NULL;
  DECL_RESULT (new_decl) = NULL;
  /* We cannot do DECL_RESULT (new_decl) = NULL; here because of LTO
     partitioning sometimes storing only the clone decl instead of the
     original.  */

  /* Generate a new name for the new version.  */
  len = IDENTIFIER_LENGTH (DECL_NAME (old_decl));
  name = XALLOCAVEC (char, len + strlen (suffix) + 2);
  memcpy (name, IDENTIFIER_POINTER (DECL_NAME (old_decl)), len);
  strcpy (name + len + 1, suffix);
  name[len] = '.';
  DECL_NAME (new_decl) = get_identifier (name);
  SET_DECL_ASSEMBLER_NAME (new_decl,
			   clone_function_name (old_decl, suffix, num_suffix));
  SET_DECL_RTL (new_decl, NULL);

  new_node = create_clone (new_decl, count, false,
			   redirect_callers, false, NULL, args_to_skip, suffix);

  /* Update the properties.
     Make the clone visible only within this translation unit.  Make sure
     that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  set_new_clone_decl_and_node_flags (new_node);
  new_node->clone.tree_map = tree_map;
  if (!implicit_section)
    new_node->set_section (get_section ());

  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_node->unique_name = true;
  FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
    new_node->maybe_create_reference (map->new_tree, NULL);

  if (ipa_transforms_to_apply.exists ())
    new_node->ipa_transforms_to_apply
      = ipa_transforms_to_apply.copy ();

  symtab->call_cgraph_duplication_hooks (this, new_node);

  return new_node;
}

/* The callgraph node is being removed from the symbol table; see if its
   entry can be replaced by another inline clone.  */
cgraph_node *
cgraph_node::find_replacement (void)
{
  cgraph_node *next_inline_clone, *replacement;

  for (next_inline_clone = clones;
       next_inline_clone
       && next_inline_clone->decl != decl;
       next_inline_clone = next_inline_clone->next_sibling_clone)
    ;

  /* If there is an inline clone of the node being removed, we need
     to put it into the position of the removed node and reorganize all
     other clones to be based on it.  */
  if (next_inline_clone)
    {
      cgraph_node *n;
      cgraph_node *new_clones;

      replacement = next_inline_clone;

      /* Unlink the inline clone from the list of clones of the removed
	 node.  */
      if (next_inline_clone->next_sibling_clone)
	next_inline_clone->next_sibling_clone->prev_sibling_clone
	  = next_inline_clone->prev_sibling_clone;
      if (next_inline_clone->prev_sibling_clone)
	{
	  gcc_assert (clones != next_inline_clone);
	  next_inline_clone->prev_sibling_clone->next_sibling_clone
	    = next_inline_clone->next_sibling_clone;
	}
      else
	{
	  gcc_assert (clones == next_inline_clone);
	  clones = next_inline_clone->next_sibling_clone;
	}

      new_clones = clones;
      clones = NULL;

      /* Copy clone info.  */
      next_inline_clone->clone = clone;

      /* Now place it into the clone tree at the same level as NODE.  */
      next_inline_clone->clone_of = clone_of;
      next_inline_clone->prev_sibling_clone = NULL;
      next_inline_clone->next_sibling_clone = NULL;
      if (clone_of)
	{
	  if (clone_of->clones)
	    clone_of->clones->prev_sibling_clone = next_inline_clone;
	  next_inline_clone->next_sibling_clone = clone_of->clones;
	  clone_of->clones = next_inline_clone;
	}

      /* Merge the clone list.  */
      if (new_clones)
	{
	  if (!next_inline_clone->clones)
	    next_inline_clone->clones = new_clones;
	  else
	    {
	      n = next_inline_clone->clones;
	      while (n->next_sibling_clone)
		n = n->next_sibling_clone;
	      n->next_sibling_clone = new_clones;
	      new_clones->prev_sibling_clone = n;
	    }
	}

      /* Update clone_of pointers.  */
      n = new_clones;
      while (n)
	{
	  n->clone_of = next_inline_clone;
	  n = n->next_sibling_clone;
	}
      return replacement;
    }
  else
    return NULL;
}

/* Like cgraph_set_call_stmt but walk the clone tree and update all
   clones sharing the same function body.
   When UPDATE_SPECULATIVE is true, all three components of a
   speculative edge get updated.  Otherwise we update only the direct
   call.  */

void
cgraph_node::set_call_stmt_including_clones (gimple *old_stmt,
					     gcall *new_stmt,
					     bool update_speculative)
{
  cgraph_node *node;
  cgraph_edge *edge = get_edge (old_stmt);

  if (edge)
    edge->set_call_stmt (new_stmt, update_speculative);

  node = clones;
  if (node)
    while (node != this)
      {
	cgraph_edge *edge = node->get_edge (old_stmt);
	if (edge)
	  {
	    edge->set_call_stmt (new_stmt, update_speculative);
	    /* If UPDATE_SPECULATIVE is false, it means that we are turning
	       a speculative call into a real code sequence.  Update the
	       callgraph edges.  */
	    if (edge->speculative && !update_speculative)
	      {
		cgraph_edge *direct, *indirect;
		ipa_ref *ref;

		gcc_assert (!edge->indirect_unknown_callee);
		edge->speculative_call_info (direct, indirect, ref);
		direct->speculative = false;
		indirect->speculative = false;
		ref->speculative = false;
	      }
	  }
	if (node->clones)
	  node = node->clones;
	else if (node->next_sibling_clone)
	  node = node->next_sibling_clone;
	else
	  {
	    while (node != this && !node->next_sibling_clone)
	      node = node->clone_of;
	    if (node != this)
	      node = node->next_sibling_clone;
	  }
      }
}
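/* Note on the traversal above (and in create_edge_including_clones below):
   the clones/next_sibling_clone/clone_of pointers form a tree, and the
   "descend into clones, else advance to next_sibling_clone, else climb via
   clone_of" stepping visits every transitive clone of THIS exactly once,
   i.e. it is an iterative preorder walk of the clone tree that needs no
   recursion or extra storage.  */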
/* Like cgraph_create_edge, walk the clone tree and update all clones sharing
   the same function body.  If clones already have an edge for OLD_STMT, only
   update the edge the same way cgraph_set_call_stmt_including_clones does.

   TODO: COUNT and LOOP_DEPTH should be properly distributed based on the
   relative frequencies of the clones.  */

void
cgraph_node::create_edge_including_clones (cgraph_node *callee,
					   gimple *old_stmt, gcall *stmt,
					   profile_count count,
					   cgraph_inline_failed_t reason)
{
  cgraph_node *node;
  cgraph_edge *edge;

  if (!get_edge (stmt))
    {
      edge = create_edge (callee, stmt, count);
      edge->inline_failed = reason;
    }

  node = clones;
  if (node)
    while (node != this)
      /* Thunk clones do not get updated while copying inline function
	 body.  */
      if (!node->thunk.thunk_p)
	{
	  cgraph_edge *edge = node->get_edge (old_stmt);

	  /* It is possible that clones already contain the edge while
	     the master didn't.  Either we promoted an indirect call into a
	     direct call in the clone, or we are processing clones of an
	     unreachable master where edges have been removed.  */
	  if (edge)
	    edge->set_call_stmt (stmt);
	  else if (! node->get_edge (stmt))
	    {
	      edge = node->create_edge (callee, stmt, count);
	      edge->inline_failed = reason;
	    }

	  if (node->clones)
	    node = node->clones;
	  else if (node->next_sibling_clone)
	    node = node->next_sibling_clone;
	  else
	    {
	      while (node != this && !node->next_sibling_clone)
		node = node->clone_of;
	      if (node != this)
		node = node->next_sibling_clone;
	    }
	}
}

/* Remove the node from the cgraph and all inline clones inlined into it.
   Skip however removal of FORBIDDEN_NODE and return true if it needs to be
   removed.  This allows the function to be called from an outer loop walking
   the clone tree.  */

bool
cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
{
  cgraph_edge *e, *next;
  bool found = false;

  if (this == forbidden_node)
    {
      callers->remove ();
      return true;
    }
  for (e = callees; e; e = next)
    {
      next = e->next_callee;
      if (!e->inline_failed)
	found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
    }
  remove ();
  return found;
}

/* The edges representing the callers of the NEW_VERSION node were
   fixed by cgraph_function_versioning (); now the call_expr in their
   respective tree code should be updated to call the NEW_VERSION.  */

static void
update_call_expr (cgraph_node *new_version)
{
  cgraph_edge *e;

  gcc_assert (new_version);

  /* Update the call expr on the edges to call the new version.  */
  for (e = new_version->callers; e; e = e->next_caller)
    {
      function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
      gimple_call_set_fndecl (e->call_stmt, new_version->decl);
      maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
    }
}


/* Create a new cgraph node which is the new version of
   the callgraph node.  REDIRECT_CALLERS holds the caller
   edges which should be redirected to point to
   NEW_VERSION.  All the callee edges of the node
   are cloned to the new version node.  Return the new
   version node.

   If non-NULL, BBS_TO_COPY determines which basic blocks
   were copied, to prevent duplication of calls that are dead
   in the clone.  */
cgraph_node *
cgraph_node::create_version_clone (tree new_decl,
				   vec<cgraph_edge *> redirect_callers,
				   bitmap bbs_to_copy,
				   const char *suffix)
{
  cgraph_node *new_version;
  cgraph_edge *e;
  unsigned i;

  new_version = cgraph_node::create (new_decl);

  new_version->analyzed = analyzed;
  new_version->definition = definition;
  new_version->local = local;
  new_version->externally_visible = false;
  new_version->no_reorder = no_reorder;
  new_version->local.local = new_version->definition;
  new_version->global = global;
  new_version->rtl = rtl;
  new_version->count = count;

  for (e = callees; e; e = e->next_callee)
    if (!bbs_to_copy
	|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      e->clone (new_version, e->call_stmt,
		e->lto_stmt_uid, count, count,
		true);
  for (e = indirect_calls; e; e = e->next_callee)
    if (!bbs_to_copy
	|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
      e->clone (new_version, e->call_stmt,
		e->lto_stmt_uid, count, count,
		true);
  FOR_EACH_VEC_ELT (redirect_callers, i, e)
    {
      /* Redirect calls to the old version node to point to its new
	 version.  */
      e->redirect_callee (new_version);
    }

  symtab->call_cgraph_duplication_hooks (this, new_version);

  dump_callgraph_transformation (this, new_version, suffix);

  return new_version;
}

/* Perform function versioning.
   Function versioning includes copying of the tree and
   a callgraph update (creating a new cgraph node and updating
   its callees and callers).

   REDIRECT_CALLERS holds the edges to be redirected
   to the new version.

   TREE_MAP is a mapping of tree nodes we want to replace with
   new ones (according to results of prior analysis).

   If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
   from the new version.
   If SKIP_RETURN is true, the new version will return void.
   If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
   If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.

   If TARGET_ATTRIBUTES is non-null, when creating a new declaration,
   add the attributes to DECL_ATTRIBUTES, and call valid_attribute_p,
   which will promote the value of the attribute DECL_FUNCTION_SPECIFIC_TARGET
   of the declaration.

   Return the new version's cgraph node.  */

cgraph_node *
cgraph_node::create_version_clone_with_body
  (vec<cgraph_edge *> redirect_callers,
   vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
   bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
   const char *suffix, tree target_attributes)
{
  tree old_decl = decl;
  cgraph_node *new_version_node = NULL;
  tree new_decl;

  if (!tree_versionable_function_p (old_decl))
    return NULL;

  gcc_assert (local.can_change_signature || !args_to_skip);

  /* Make a new FUNCTION_DECL tree node for the new version.  */
  if (!args_to_skip && !skip_return)
    new_decl = copy_node (old_decl);
  else
    new_decl
      = build_function_decl_skip_args (old_decl, args_to_skip, skip_return);

  /* Generate a new name for the new version.  */
  DECL_NAME (new_decl) = clone_function_name_numbered (old_decl, suffix);
  SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
  SET_DECL_RTL (new_decl, NULL);

  DECL_VIRTUAL_P (new_decl) = 0;

  if (target_attributes)
    {
      DECL_ATTRIBUTES (new_decl) = target_attributes;

      location_t saved_loc = input_location;
      tree v = TREE_VALUE (target_attributes);
      input_location = DECL_SOURCE_LOCATION (new_decl);
      bool r = targetm.target_option.valid_attribute_p (new_decl, NULL, v, 0);
      input_location = saved_loc;
      if (!r)
	return NULL;
    }

  /* When the old decl was a con-/destructor make sure the clone isn't.  */
  DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
  DECL_STATIC_DESTRUCTOR (new_decl) = 0;

  /* Create the new version's call-graph node
     and update the edges of the new node.  */
  new_version_node = create_version_clone (new_decl, redirect_callers,
					   bbs_to_copy, suffix);

  if (ipa_transforms_to_apply.exists ())
    new_version_node->ipa_transforms_to_apply
      = ipa_transforms_to_apply.copy ();
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
			    skip_return, bbs_to_copy, new_entry_block);

  /* Update the new version's properties.
     Make the new version visible only within this translation unit.  Make
     sure that it is not weak either.
     ??? We cannot use COMDAT linkage because there is no
     ABI support for this.  */
  new_version_node->make_decl_local ();
  DECL_VIRTUAL_P (new_version_node->decl) = 0;
  new_version_node->externally_visible = 0;
  new_version_node->local.local = 1;
  new_version_node->lowered = true;
  if (!implicit_section)
    new_version_node->set_section (get_section ());
  /* Clones of global symbols or symbols with unique names are unique.  */
  if ((TREE_PUBLIC (old_decl)
       && !DECL_EXTERNAL (old_decl)
       && !DECL_WEAK (old_decl)
       && !DECL_COMDAT (old_decl))
      || in_lto_p)
    new_version_node->unique_name = true;

  /* Update the call_expr on the edges to call the new version node.  */
  update_call_expr (new_version_node);

  symtab->call_cgraph_insertion_hooks (new_version_node);
  return new_version_node;
}

/* Given a virtual clone, turn it into an actual clone.  */

static void
cgraph_materialize_clone (cgraph_node *node)
{
  bitmap_obstack_initialize (NULL);
  node->former_clone_of = node->clone_of->decl;
  if (node->clone_of->former_clone_of)
    node->former_clone_of = node->clone_of->former_clone_of;
  /* Copy the OLD_VERSION_NODE function tree to the new version.  */
  tree_function_versioning (node->clone_of->decl, node->decl,
			    node->clone.tree_map, true,
			    node->clone.args_to_skip, false,
			    NULL, NULL);
  if (symtab->dump_file)
    {
      dump_function_to_file (node->clone_of->decl, symtab->dump_file,
			     dump_flags);
      dump_function_to_file (node->decl, symtab->dump_file, dump_flags);
    }

  /* The function is no longer a clone.  */
  if (node->next_sibling_clone)
    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
  if (node->prev_sibling_clone)
    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
  else
    node->clone_of->clones = node->next_sibling_clone;
  node->next_sibling_clone = NULL;
  node->prev_sibling_clone = NULL;
  if (!node->clone_of->analyzed && !node->clone_of->clones)
    {
      node->clone_of->release_body ();
      node->clone_of->remove_callees ();
      node->clone_of->remove_all_references ();
    }
  node->clone_of = NULL;
  bitmap_obstack_release (NULL);
}

/* Once all functions from the compilation unit are in memory, produce all
   clones and update all calls.  We might also do this on demand if we don't
   want to bring all functions to memory prior to compilation, but the current
   WHOPR implementation does that and it is a bit easier to keep everything
   right in this order.  */

void
symbol_table::materialize_all_clones (void)
{
  cgraph_node *node;
  bool stabilized = false;


  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Materializing clones\n");

  cgraph_node::checking_verify_cgraph_nodes ();

  /* We can also do this in topological order, but the number of iterations
     should be bounded by the number of IPA passes since a single IPA pass is
     probably not going to create clones of clones it created itself.  */
  while (!stabilized)
    {
      stabilized = true;
      FOR_EACH_FUNCTION (node)
	{
	  if (node->clone_of && node->decl != node->clone_of->decl
	      && !gimple_has_body_p (node->decl))
	    {
	      if (!node->clone_of->clone_of)
		node->clone_of->get_untransformed_body ();
	      if (gimple_has_body_p (node->clone_of->decl))
		{
		  if (symtab->dump_file)
		    {
		      fprintf (symtab->dump_file, "cloning %s to %s\n",
			       xstrdup_for_dump (node->clone_of->name ()),
			       xstrdup_for_dump (node->name ()));
		      if (node->clone.tree_map)
			{
			  unsigned int i;
			  fprintf (symtab->dump_file, "   replace map: ");
			  for (i = 0;
			       i < vec_safe_length (node->clone.tree_map);
			       i++)
			    {
			      ipa_replace_map *replace_info;
			      replace_info = (*node->clone.tree_map)[i];
			      print_generic_expr (symtab->dump_file,
						  replace_info->old_tree);
			      fprintf (symtab->dump_file, " -> ");
			      print_generic_expr (symtab->dump_file,
						  replace_info->new_tree);
			      fprintf (symtab->dump_file, "%s%s;",
				       replace_info->replace_p ? "(replace)":"",
				       replace_info->ref_p ? "(ref)":"");
"(ref)":""); 1204 } 1205 fprintf (symtab->dump_file, "\n"); 1206 } 1207 if (node->clone.args_to_skip) 1208 { 1209 fprintf (symtab->dump_file, " args_to_skip: "); 1210 dump_bitmap (symtab->dump_file, 1211 node->clone.args_to_skip); 1212 } 1213 if (node->clone.args_to_skip) 1214 { 1215 fprintf (symtab->dump_file, " combined_args_to_skip:"); 1216 dump_bitmap (symtab->dump_file, node->clone.combined_args_to_skip); 1217 } 1218 } 1219 cgraph_materialize_clone (node); 1220 stabilized = false; 1221 } 1222 } 1223 } 1224 } 1225 FOR_EACH_FUNCTION (node) 1226 if (!node->analyzed && node->callees) 1227 { 1228 node->remove_callees (); 1229 node->remove_all_references (); 1230 } 1231 else 1232 node->clear_stmts_in_references (); 1233 if (symtab->dump_file) 1234 fprintf (symtab->dump_file, "Materialization Call site updates done.\n"); 1235 1236 cgraph_node::checking_verify_cgraph_nodes (); 1237 1238 symtab->remove_unreachable_nodes (symtab->dump_file); 1239 } 1240 1241 #include "gt-cgraphclones.h" 1242