1 /* Output variables, constants and external declarations, for GNU compiler. 2 Copyright (C) 1987-2020 Free Software Foundation, Inc. 3 4 This file is part of GCC. 5 6 GCC is free software; you can redistribute it and/or modify it under 7 the terms of the GNU General Public License as published by the Free 8 Software Foundation; either version 3, or (at your option) any later 9 version. 10 11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY 12 WARRANTY; without even the implied warranty of MERCHANTABILITY or 13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 14 for more details. 15 16 You should have received a copy of the GNU General Public License 17 along with GCC; see the file COPYING3. If not see 18 <http://www.gnu.org/licenses/>. */ 19 20 21 /* This file handles generation of all the assembler code 22 *except* the instructions of a function. 23 This includes declarations of variables and their initial values. 24 25 We also output the assembler code for constants stored in memory 26 and are responsible for combining constants with the same value. */ 27 28 #include "config.h" 29 #include "system.h" 30 #include "coretypes.h" 31 #include "backend.h" 32 #include "target.h" 33 #include "rtl.h" 34 #include "tree.h" 35 #include "predict.h" 36 #include "memmodel.h" 37 #include "tm_p.h" 38 #include "stringpool.h" 39 #include "regs.h" 40 #include "emit-rtl.h" 41 #include "cgraph.h" 42 #include "diagnostic-core.h" 43 #include "fold-const.h" 44 #include "stor-layout.h" 45 #include "varasm.h" 46 #include "flags.h" 47 #include "stmt.h" 48 #include "expr.h" 49 #include "expmed.h" 50 #include "optabs.h" 51 #include "output.h" 52 #include "langhooks.h" 53 #include "debug.h" 54 #include "common/common-target.h" 55 #include "stringpool.h" 56 #include "attribs.h" 57 #include "asan.h" 58 #include "rtl-iter.h" 59 #include "file-prefix-map.h" /* remap_debug_filename() */ 60 61 #ifdef XCOFF_DEBUGGING_INFO 62 #include "xcoffout.h" /* Needed for external data declarations. */ 63 #endif 64 65 /* The (assembler) name of the first globally-visible object output. */ 66 extern GTY(()) const char *first_global_object_name; 67 extern GTY(()) const char *weak_global_object_name; 68 69 const char *first_global_object_name; 70 const char *weak_global_object_name; 71 72 class addr_const; 73 class constant_descriptor_rtx; 74 struct rtx_constant_pool; 75 76 #define n_deferred_constants (crtl->varasm.deferred_constants) 77 78 /* Number for making the label on the next 79 constant that is stored in memory. */ 80 81 static GTY(()) int const_labelno; 82 83 /* Carry information from ASM_DECLARE_OBJECT_NAME 84 to ASM_FINISH_DECLARE_OBJECT. */ 85 86 int size_directive_output; 87 88 /* The last decl for which assemble_variable was called, 89 if it did ASM_DECLARE_OBJECT_NAME. 90 If the last call to assemble_variable didn't do that, 91 this holds 0. */ 92 93 tree last_assemble_variable_decl; 94 95 /* The following global variable indicates if the first basic block 96 in a function belongs to the cold partition or not. */ 97 98 bool first_function_block_is_cold; 99 100 /* Whether we saw any functions with no_split_stack. 
*/ 101 102 static bool saw_no_split_stack; 103 104 static const char *strip_reg_name (const char *); 105 static int contains_pointers_p (tree); 106 #ifdef ASM_OUTPUT_EXTERNAL 107 static bool incorporeal_function_p (tree); 108 #endif 109 static void decode_addr_const (tree, class addr_const *); 110 static hashval_t const_hash_1 (const tree); 111 static int compare_constant (const tree, const tree); 112 static void output_constant_def_contents (rtx); 113 static void output_addressed_constants (tree); 114 static unsigned HOST_WIDE_INT output_constant (tree, unsigned HOST_WIDE_INT, 115 unsigned int, bool, bool); 116 static void globalize_decl (tree); 117 static bool decl_readonly_section_1 (enum section_category); 118 #ifdef BSS_SECTION_ASM_OP 119 #ifdef ASM_OUTPUT_ALIGNED_BSS 120 static void asm_output_aligned_bss (FILE *, tree, const char *, 121 unsigned HOST_WIDE_INT, int) 122 ATTRIBUTE_UNUSED; 123 #endif 124 #endif /* BSS_SECTION_ASM_OP */ 125 static void mark_weak (tree); 126 static void output_constant_pool (const char *, tree); 127 static void handle_vtv_comdat_section (section *, const_tree); 128 129 /* Well-known sections, each one associated with some sort of *_ASM_OP. */ 130 section *text_section; 131 section *data_section; 132 section *readonly_data_section; 133 section *sdata_section; 134 section *ctors_section; 135 section *dtors_section; 136 section *bss_section; 137 section *sbss_section; 138 139 /* Various forms of common section. All are guaranteed to be nonnull. */ 140 section *tls_comm_section; 141 section *comm_section; 142 section *lcomm_section; 143 144 /* A SECTION_NOSWITCH section used for declaring global BSS variables. 145 May be null. */ 146 section *bss_noswitch_section; 147 148 /* The section that holds the main exception table, when known. The section 149 is set either by the target's init_sections hook or by the first call to 150 switch_to_exception_section. */ 151 section *exception_section; 152 153 /* The section that holds the DWARF2 frame unwind information, when known. 154 The section is set either by the target's init_sections hook or by the 155 first call to switch_to_eh_frame_section. */ 156 section *eh_frame_section; 157 158 /* asm_out_file's current section. This is NULL if no section has yet 159 been selected or if we lose track of what the current section is. */ 160 section *in_section; 161 162 /* True if code for the current function is currently being directed 163 at the cold section. */ 164 bool in_cold_section_p; 165 166 /* The following global holds the "function name" for the code in the 167 cold section of a function, if hot/cold function splitting is enabled 168 and there was actually code that went into the cold section. A 169 pseudo function name is needed for the cold section of code for some 170 debugging tools that perform symbolization. */ 171 tree cold_function_name = NULL_TREE; 172 173 /* A linked list of all the unnamed sections. */ 174 static GTY(()) section *unnamed_sections; 175 176 /* Return a nonzero value if DECL has a section attribute. */ 177 #define IN_NAMED_SECTION(DECL) \ 178 (VAR_OR_FUNCTION_DECL_P (DECL) && DECL_SECTION_NAME (DECL) != NULL) 179 180 struct section_hasher : ggc_ptr_hash<section> 181 { 182 typedef const char *compare_type; 183 184 static hashval_t hash (section *); 185 static bool equal (section *, const char *); 186 }; 187 188 /* Hash table of named sections. 
*/ 189 static GTY(()) hash_table<section_hasher> *section_htab; 190 191 struct object_block_hasher : ggc_ptr_hash<object_block> 192 { 193 typedef const section *compare_type; 194 195 static hashval_t hash (object_block *); 196 static bool equal (object_block *, const section *); 197 }; 198 199 /* A table of object_blocks, indexed by section. */ 200 static GTY(()) hash_table<object_block_hasher> *object_block_htab; 201 202 /* The next number to use for internal anchor labels. */ 203 static GTY(()) int anchor_labelno; 204 205 /* A pool of constants that can be shared between functions. */ 206 static GTY(()) struct rtx_constant_pool *shared_constant_pool; 207 208 /* Helper routines for maintaining section_htab. */ 209 210 bool 211 section_hasher::equal (section *old, const char *new_name) 212 { 213 return strcmp (old->named.name, new_name) == 0; 214 } 215 216 hashval_t 217 section_hasher::hash (section *old) 218 { 219 return htab_hash_string (old->named.name); 220 } 221 222 /* Return a hash value for section SECT. */ 223 224 static hashval_t 225 hash_section (section *sect) 226 { 227 if (sect->common.flags & SECTION_NAMED) 228 return htab_hash_string (sect->named.name); 229 return sect->common.flags & ~SECTION_DECLARED; 230 } 231 232 /* Helper routines for maintaining object_block_htab. */ 233 234 inline bool 235 object_block_hasher::equal (object_block *old, const section *new_section) 236 { 237 return old->sect == new_section; 238 } 239 240 hashval_t 241 object_block_hasher::hash (object_block *old) 242 { 243 return hash_section (old->sect); 244 } 245 246 /* Return a new unnamed section with the given fields. */ 247 248 section * 249 get_unnamed_section (unsigned int flags, void (*callback) (const void *), 250 const void *data) 251 { 252 section *sect; 253 254 sect = ggc_alloc<section> (); 255 sect->unnamed.common.flags = flags | SECTION_UNNAMED; 256 sect->unnamed.callback = callback; 257 sect->unnamed.data = data; 258 sect->unnamed.next = unnamed_sections; 259 260 unnamed_sections = sect; 261 return sect; 262 } 263 264 /* Return a SECTION_NOSWITCH section with the given fields. */ 265 266 static section * 267 get_noswitch_section (unsigned int flags, noswitch_section_callback callback) 268 { 269 section *sect; 270 271 sect = ggc_alloc<section> (); 272 sect->noswitch.common.flags = flags | SECTION_NOSWITCH; 273 sect->noswitch.callback = callback; 274 275 return sect; 276 } 277 278 /* Return the named section structure associated with NAME. Create 279 a new section with the given fields if no such structure exists. 280 When NOT_EXISTING, then fail if the section already exists. */ 281 282 section * 283 get_section (const char *name, unsigned int flags, tree decl, 284 bool not_existing) 285 { 286 section *sect, **slot; 287 288 slot = section_htab->find_slot_with_hash (name, htab_hash_string (name), 289 INSERT); 290 flags |= SECTION_NAMED; 291 if (*slot == NULL) 292 { 293 sect = ggc_alloc<section> (); 294 sect->named.common.flags = flags; 295 sect->named.name = ggc_strdup (name); 296 sect->named.decl = decl; 297 *slot = sect; 298 } 299 else 300 { 301 if (not_existing) 302 internal_error ("Section already exists: %qs", name); 303 304 sect = *slot; 305 /* It is fine if one of the sections has SECTION_NOTYPE as long as 306 the other has none of the contrary flags (see the logic at the end 307 of default_section_type_flags, below). 
*/ 308 if (((sect->common.flags ^ flags) & SECTION_NOTYPE) 309 && !((sect->common.flags | flags) 310 & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE 311 | (HAVE_COMDAT_GROUP ? SECTION_LINKONCE : 0)))) 312 { 313 sect->common.flags |= SECTION_NOTYPE; 314 flags |= SECTION_NOTYPE; 315 } 316 if ((sect->common.flags & ~SECTION_DECLARED) != flags 317 && ((sect->common.flags | flags) & SECTION_OVERRIDE) == 0) 318 { 319 /* It is fine if one of the section flags is 320 SECTION_WRITE | SECTION_RELRO and the other has none of these 321 flags (i.e. read-only) in named sections and either the 322 section hasn't been declared yet or has been declared as writable. 323 In that case just make sure the resulting flags are 324 SECTION_WRITE | SECTION_RELRO, ie. writable only because of 325 relocations. */ 326 if (((sect->common.flags ^ flags) & (SECTION_WRITE | SECTION_RELRO)) 327 == (SECTION_WRITE | SECTION_RELRO) 328 && (sect->common.flags 329 & ~(SECTION_DECLARED | SECTION_WRITE | SECTION_RELRO)) 330 == (flags & ~(SECTION_WRITE | SECTION_RELRO)) 331 && ((sect->common.flags & SECTION_DECLARED) == 0 332 || (sect->common.flags & SECTION_WRITE))) 333 { 334 sect->common.flags |= (SECTION_WRITE | SECTION_RELRO); 335 return sect; 336 } 337 /* Sanity check user variables for flag changes. */ 338 if (sect->named.decl != NULL 339 && DECL_P (sect->named.decl) 340 && decl != sect->named.decl) 341 { 342 if (decl != NULL && DECL_P (decl)) 343 error ("%+qD causes a section type conflict with %qD", 344 decl, sect->named.decl); 345 else 346 error ("section type conflict with %qD", sect->named.decl); 347 inform (DECL_SOURCE_LOCATION (sect->named.decl), 348 "%qD was declared here", sect->named.decl); 349 } 350 else if (decl != NULL && DECL_P (decl)) 351 error ("%+qD causes a section type conflict", decl); 352 else 353 error ("section type conflict"); 354 /* Make sure we don't error about one section multiple times. */ 355 sect->common.flags |= SECTION_OVERRIDE; 356 } 357 } 358 return sect; 359 } 360 361 /* Return true if the current compilation mode benefits from having 362 objects grouped into blocks. */ 363 364 static bool 365 use_object_blocks_p (void) 366 { 367 return flag_section_anchors; 368 } 369 370 /* Return the object_block structure for section SECT. Create a new 371 structure if we haven't created one already. Return null if SECT 372 itself is null. Return also null for mergeable sections since 373 section anchors can't be used in mergeable sections anyway, 374 because the linker might move objects around, and using the 375 object blocks infrastructure in that case is both a waste and a 376 maintenance burden. */ 377 378 static struct object_block * 379 get_block_for_section (section *sect) 380 { 381 struct object_block *block; 382 383 if (sect == NULL) 384 return NULL; 385 386 if (sect->common.flags & SECTION_MERGE) 387 return NULL; 388 389 object_block **slot 390 = object_block_htab->find_slot_with_hash (sect, hash_section (sect), 391 INSERT); 392 block = *slot; 393 if (block == NULL) 394 { 395 block = ggc_cleared_alloc<object_block> (); 396 block->sect = sect; 397 *slot = block; 398 } 399 return block; 400 } 401 402 /* Create a symbol with label LABEL and place it at byte offset 403 OFFSET in BLOCK. OFFSET can be negative if the symbol's offset 404 is not yet known. LABEL must be a garbage-collected string. 
*/ 405 406 static rtx 407 create_block_symbol (const char *label, struct object_block *block, 408 HOST_WIDE_INT offset) 409 { 410 rtx symbol; 411 unsigned int size; 412 413 /* Create the extended SYMBOL_REF. */ 414 size = RTX_HDR_SIZE + sizeof (struct block_symbol); 415 symbol = (rtx) ggc_internal_alloc (size); 416 417 /* Initialize the normal SYMBOL_REF fields. */ 418 memset (symbol, 0, size); 419 PUT_CODE (symbol, SYMBOL_REF); 420 PUT_MODE (symbol, Pmode); 421 XSTR (symbol, 0) = label; 422 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_HAS_BLOCK_INFO; 423 424 /* Initialize the block_symbol stuff. */ 425 SYMBOL_REF_BLOCK (symbol) = block; 426 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset; 427 428 return symbol; 429 } 430 431 /* Return a section with a particular name and with whatever SECTION_* 432 flags section_type_flags deems appropriate. The name of the section 433 is taken from NAME if nonnull, otherwise it is taken from DECL's 434 DECL_SECTION_NAME. DECL is the decl associated with the section 435 (see the section comment for details) and RELOC is as for 436 section_type_flags. */ 437 438 section * 439 get_named_section (tree decl, const char *name, int reloc) 440 { 441 unsigned int flags; 442 443 if (name == NULL) 444 { 445 gcc_assert (decl && DECL_P (decl) && DECL_SECTION_NAME (decl)); 446 name = DECL_SECTION_NAME (decl); 447 } 448 449 flags = targetm.section_type_flags (decl, name, reloc); 450 return get_section (name, flags, decl); 451 } 452 453 /* Worker for resolve_unique_section. */ 454 455 static bool 456 set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED) 457 { 458 n->implicit_section = true; 459 return false; 460 } 461 462 /* If required, set DECL_SECTION_NAME to a unique name. */ 463 464 void 465 resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED, 466 int flag_function_or_data_sections) 467 { 468 if (DECL_SECTION_NAME (decl) == NULL 469 && targetm_common.have_named_sections 470 && (flag_function_or_data_sections 471 || DECL_COMDAT_GROUP (decl))) 472 { 473 targetm.asm_out.unique_section (decl, reloc); 474 if (DECL_SECTION_NAME (decl)) 475 symtab_node::get (decl)->call_for_symbol_and_aliases 476 (set_implicit_section, NULL, true); 477 } 478 } 479 480 #ifdef BSS_SECTION_ASM_OP 481 482 #ifdef ASM_OUTPUT_ALIGNED_BSS 483 484 /* Utility function for targets to use in implementing 485 ASM_OUTPUT_ALIGNED_BSS. 486 ??? It is believed that this function will work in most cases so such 487 support is localized here. */ 488 489 static void 490 asm_output_aligned_bss (FILE *file, tree decl ATTRIBUTE_UNUSED, 491 const char *name, unsigned HOST_WIDE_INT size, 492 int align) 493 { 494 switch_to_section (bss_section); 495 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT)); 496 #ifdef ASM_DECLARE_OBJECT_NAME 497 last_assemble_variable_decl = decl; 498 ASM_DECLARE_OBJECT_NAME (file, name, decl); 499 #else 500 /* Standard thing is just output label for the object. */ 501 ASM_OUTPUT_LABEL (file, name); 502 #endif /* ASM_DECLARE_OBJECT_NAME */ 503 ASM_OUTPUT_SKIP (file, size ? size : 1); 504 } 505 506 #endif 507 508 #endif /* BSS_SECTION_ASM_OP */ 509 510 #ifndef USE_SELECT_SECTION_FOR_FUNCTIONS 511 /* Return the hot section for function DECL. Return text_section for 512 null DECLs. 
*/ 513 514 static section * 515 hot_function_section (tree decl) 516 { 517 if (decl != NULL_TREE 518 && DECL_SECTION_NAME (decl) != NULL 519 && targetm_common.have_named_sections) 520 return get_named_section (decl, NULL, 0); 521 else 522 return text_section; 523 } 524 #endif 525 526 /* Return section for TEXT_SECTION_NAME if DECL or DECL_SECTION_NAME (DECL) 527 is NULL. 528 529 When DECL_SECTION_NAME is non-NULL and it is implicit section and 530 NAMED_SECTION_SUFFIX is non-NULL, then produce section called 531 concatenate the name with NAMED_SECTION_SUFFIX. 532 Otherwise produce "TEXT_SECTION_NAME.IMPLICIT_NAME". */ 533 534 section * 535 get_named_text_section (tree decl, 536 const char *text_section_name, 537 const char *named_section_suffix) 538 { 539 if (decl && DECL_SECTION_NAME (decl)) 540 { 541 if (named_section_suffix) 542 { 543 const char *dsn = DECL_SECTION_NAME (decl); 544 const char *stripped_name; 545 char *name, *buffer; 546 547 name = (char *) alloca (strlen (dsn) + 1); 548 memcpy (name, dsn, 549 strlen (dsn) + 1); 550 551 stripped_name = targetm.strip_name_encoding (name); 552 553 buffer = ACONCAT ((stripped_name, named_section_suffix, NULL)); 554 return get_named_section (decl, buffer, 0); 555 } 556 else if (symtab_node::get (decl)->implicit_section) 557 { 558 const char *name; 559 560 /* Do not try to split gnu_linkonce functions. This gets somewhat 561 slipperly. */ 562 if (DECL_COMDAT_GROUP (decl) && !HAVE_COMDAT_GROUP) 563 return NULL; 564 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)); 565 name = targetm.strip_name_encoding (name); 566 return get_named_section (decl, ACONCAT ((text_section_name, ".", 567 name, NULL)), 0); 568 } 569 else 570 return NULL; 571 } 572 return get_named_section (decl, text_section_name, 0); 573 } 574 575 /* Choose named function section based on its frequency. */ 576 577 section * 578 default_function_section (tree decl, enum node_frequency freq, 579 bool startup, bool exit) 580 { 581 #if defined HAVE_LD_EH_GC_SECTIONS && defined HAVE_LD_EH_GC_SECTIONS_BUG 582 /* Old GNU linkers have buggy --gc-section support, which sometimes 583 results in .gcc_except_table* sections being garbage collected. */ 584 if (decl 585 && symtab_node::get (decl)->implicit_section) 586 return NULL; 587 #endif 588 589 if (!flag_reorder_functions 590 || !targetm_common.have_named_sections) 591 return NULL; 592 /* Startup code should go to startup subsection unless it is 593 unlikely executed (this happens especially with function splitting 594 where we can split away unnecessary parts of static constructors. */ 595 if (startup && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED) 596 { 597 /* During LTO the tp_first_run profiling will naturally place all 598 initialization code first. Using separate section is counter-productive 599 because startup only code may call functions which are no longer 600 startup only. */ 601 if (!in_lto_p 602 || !cgraph_node::get (decl)->tp_first_run 603 || !opt_for_fn (decl, flag_profile_reorder_functions)) 604 return get_named_text_section (decl, ".text.startup", NULL); 605 else 606 return NULL; 607 } 608 609 /* Similarly for exit. */ 610 if (exit && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED) 611 return get_named_text_section (decl, ".text.exit", NULL); 612 613 /* Group cold functions together, similarly for hot code. 
*/ 614 switch (freq) 615 { 616 case NODE_FREQUENCY_UNLIKELY_EXECUTED: 617 return get_named_text_section (decl, ".text.unlikely", NULL); 618 case NODE_FREQUENCY_HOT: 619 return get_named_text_section (decl, ".text.hot", NULL); 620 /* FALLTHRU */ 621 default: 622 return NULL; 623 } 624 } 625 626 /* Return the section for function DECL. 627 628 If DECL is NULL_TREE, return the text section. We can be passed 629 NULL_TREE under some circumstances by dbxout.c at least. 630 631 If FORCE_COLD is true, return cold function section ignoring 632 the frequency info of cgraph_node. */ 633 634 static section * 635 function_section_1 (tree decl, bool force_cold) 636 { 637 section *section = NULL; 638 enum node_frequency freq = NODE_FREQUENCY_NORMAL; 639 bool startup = false, exit = false; 640 641 if (decl) 642 { 643 struct cgraph_node *node = cgraph_node::get (decl); 644 645 if (node) 646 { 647 freq = node->frequency; 648 startup = node->only_called_at_startup; 649 exit = node->only_called_at_exit; 650 } 651 } 652 if (force_cold) 653 freq = NODE_FREQUENCY_UNLIKELY_EXECUTED; 654 655 #ifdef USE_SELECT_SECTION_FOR_FUNCTIONS 656 if (decl != NULL_TREE 657 && DECL_SECTION_NAME (decl) != NULL) 658 { 659 if (targetm.asm_out.function_section) 660 section = targetm.asm_out.function_section (decl, freq, 661 startup, exit); 662 if (section) 663 return section; 664 return get_named_section (decl, NULL, 0); 665 } 666 else 667 return targetm.asm_out.select_section 668 (decl, freq == NODE_FREQUENCY_UNLIKELY_EXECUTED, 669 symtab_node::get (decl)->definition_alignment ()); 670 #else 671 if (targetm.asm_out.function_section) 672 section = targetm.asm_out.function_section (decl, freq, startup, exit); 673 if (section) 674 return section; 675 return hot_function_section (decl); 676 #endif 677 } 678 679 /* Return the section for function DECL. 680 681 If DECL is NULL_TREE, return the text section. We can be passed 682 NULL_TREE under some circumstances by dbxout.c at least. */ 683 684 section * 685 function_section (tree decl) 686 { 687 /* Handle cases where function splitting code decides 688 to put function entry point into unlikely executed section 689 despite the fact that the function itself is not cold 690 (i.e. it is called rarely but contains a hot loop that is 691 better to live in hot subsection for the code locality). */ 692 return function_section_1 (decl, 693 first_function_block_is_cold); 694 } 695 696 /* Return the section for the current function, take IN_COLD_SECTION_P 697 into account. */ 698 699 section * 700 current_function_section (void) 701 { 702 return function_section_1 (current_function_decl, in_cold_section_p); 703 } 704 705 /* Tell assembler to switch to unlikely-to-be-executed text section. */ 706 707 section * 708 unlikely_text_section (void) 709 { 710 return function_section_1 (current_function_decl, true); 711 } 712 713 /* When called within a function context, return true if the function 714 has been assigned a cold text section and if SECT is that section. 715 When called outside a function context, return true if SECT is the 716 default cold section. */ 717 718 bool 719 unlikely_text_section_p (section *sect) 720 { 721 return sect == function_section_1 (current_function_decl, true); 722 } 723 724 /* Switch to the other function partition (if inside of hot section 725 into cold section, otherwise into the hot section). 
*/ 726 727 void 728 switch_to_other_text_partition (void) 729 { 730 in_cold_section_p = !in_cold_section_p; 731 switch_to_section (current_function_section ()); 732 } 733 734 /* Return the read-only data section associated with function DECL. */ 735 736 section * 737 default_function_rodata_section (tree decl) 738 { 739 if (decl != NULL_TREE && DECL_SECTION_NAME (decl)) 740 { 741 const char *name = DECL_SECTION_NAME (decl); 742 743 if (DECL_COMDAT_GROUP (decl) && HAVE_COMDAT_GROUP) 744 { 745 const char *dot; 746 size_t len; 747 char* rname; 748 749 dot = strchr (name + 1, '.'); 750 if (!dot) 751 dot = name; 752 len = strlen (dot) + 8; 753 rname = (char *) alloca (len); 754 755 strcpy (rname, ".rodata"); 756 strcat (rname, dot); 757 return get_section (rname, SECTION_LINKONCE, decl); 758 } 759 /* For .gnu.linkonce.t.foo we want to use .gnu.linkonce.r.foo. */ 760 else if (DECL_COMDAT_GROUP (decl) 761 && strncmp (name, ".gnu.linkonce.t.", 16) == 0) 762 { 763 size_t len = strlen (name) + 1; 764 char *rname = (char *) alloca (len); 765 766 memcpy (rname, name, len); 767 rname[14] = 'r'; 768 return get_section (rname, SECTION_LINKONCE, decl); 769 } 770 /* For .text.foo we want to use .rodata.foo. */ 771 else if (flag_function_sections && flag_data_sections 772 && strncmp (name, ".text.", 6) == 0) 773 { 774 size_t len = strlen (name) + 1; 775 char *rname = (char *) alloca (len + 2); 776 777 memcpy (rname, ".rodata", 7); 778 memcpy (rname + 7, name + 5, len - 5); 779 return get_section (rname, 0, decl); 780 } 781 } 782 783 return readonly_data_section; 784 } 785 786 /* Return the read-only data section associated with function DECL 787 for targets where that section should be always the single 788 readonly data section. */ 789 790 section * 791 default_no_function_rodata_section (tree decl ATTRIBUTE_UNUSED) 792 { 793 return readonly_data_section; 794 } 795 796 /* A subroutine of mergeable_string_section and mergeable_constant_section. */ 797 798 static const char * 799 function_mergeable_rodata_prefix (void) 800 { 801 section *s = targetm.asm_out.function_rodata_section (current_function_decl); 802 if (SECTION_STYLE (s) == SECTION_NAMED) 803 return s->named.name; 804 else 805 return targetm.asm_out.mergeable_rodata_prefix; 806 } 807 808 /* Return the section to use for string merging. */ 809 810 static section * 811 mergeable_string_section (tree decl ATTRIBUTE_UNUSED, 812 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED, 813 unsigned int flags ATTRIBUTE_UNUSED) 814 { 815 HOST_WIDE_INT len; 816 817 if (HAVE_GAS_SHF_MERGE && flag_merge_constants 818 && TREE_CODE (decl) == STRING_CST 819 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE 820 && align <= 256 821 && (len = int_size_in_bytes (TREE_TYPE (decl))) > 0 822 && TREE_STRING_LENGTH (decl) == len) 823 { 824 scalar_int_mode mode; 825 unsigned int modesize; 826 const char *str; 827 HOST_WIDE_INT i; 828 int j, unit; 829 const char *prefix = function_mergeable_rodata_prefix (); 830 char *name = (char *) alloca (strlen (prefix) + 30); 831 832 mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (TREE_TYPE (decl))); 833 modesize = GET_MODE_BITSIZE (mode); 834 if (modesize >= 8 && modesize <= 256 835 && (modesize & (modesize - 1)) == 0) 836 { 837 if (align < modesize) 838 align = modesize; 839 840 if (!HAVE_LD_ALIGNED_SHF_MERGE && align > 8) 841 return readonly_data_section; 842 843 str = TREE_STRING_POINTER (decl); 844 unit = GET_MODE_SIZE (mode); 845 846 /* Check for embedded NUL characters. 
*/ 847 for (i = 0; i < len; i += unit) 848 { 849 for (j = 0; j < unit; j++) 850 if (str[i + j] != '\0') 851 break; 852 if (j == unit) 853 break; 854 } 855 if (i == len - unit || (unit == 1 && i == len)) 856 { 857 sprintf (name, "%s.str%d.%d", prefix, 858 modesize / 8, (int) (align / 8)); 859 flags |= (modesize / 8) | SECTION_MERGE | SECTION_STRINGS; 860 return get_section (name, flags, NULL); 861 } 862 } 863 } 864 865 return readonly_data_section; 866 } 867 868 /* Return the section to use for constant merging. */ 869 870 section * 871 mergeable_constant_section (machine_mode mode ATTRIBUTE_UNUSED, 872 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED, 873 unsigned int flags ATTRIBUTE_UNUSED) 874 { 875 if (HAVE_GAS_SHF_MERGE && flag_merge_constants 876 && mode != VOIDmode 877 && mode != BLKmode 878 && known_le (GET_MODE_BITSIZE (mode), align) 879 && align >= 8 880 && align <= 256 881 && (align & (align - 1)) == 0 882 && (HAVE_LD_ALIGNED_SHF_MERGE ? 1 : align == 8)) 883 { 884 const char *prefix = function_mergeable_rodata_prefix (); 885 char *name = (char *) alloca (strlen (prefix) + 30); 886 887 sprintf (name, "%s.cst%d", prefix, (int) (align / 8)); 888 flags |= (align / 8) | SECTION_MERGE; 889 return get_section (name, flags, NULL); 890 } 891 return readonly_data_section; 892 } 893 894 /* Given NAME, a putative register name, discard any customary prefixes. */ 895 896 static const char * 897 strip_reg_name (const char *name) 898 { 899 #ifdef REGISTER_PREFIX 900 if (!strncmp (name, REGISTER_PREFIX, strlen (REGISTER_PREFIX))) 901 name += strlen (REGISTER_PREFIX); 902 #endif 903 if (name[0] == '%' || name[0] == '#') 904 name++; 905 return name; 906 } 907 908 /* The user has asked for a DECL to have a particular name. Set (or 909 change) it in such a way that we don't prefix an underscore to 910 it. */ 911 void 912 set_user_assembler_name (tree decl, const char *name) 913 { 914 char *starred = (char *) alloca (strlen (name) + 2); 915 starred[0] = '*'; 916 strcpy (starred + 1, name); 917 symtab->change_decl_assembler_name (decl, get_identifier (starred)); 918 SET_DECL_RTL (decl, NULL_RTX); 919 } 920 921 /* Decode an `asm' spec for a declaration as a register name. 922 Return the register number, or -1 if nothing specified, 923 or -2 if the ASMSPEC is not `cc' or `memory' and is not recognized, 924 or -3 if ASMSPEC is `cc' and is not recognized, 925 or -4 if ASMSPEC is `memory' and is not recognized. 926 Accept an exact spelling or a decimal number. 927 Prefixes such as % are optional. */ 928 929 int 930 decode_reg_name_and_count (const char *asmspec, int *pnregs) 931 { 932 /* Presume just one register is clobbered. */ 933 *pnregs = 1; 934 935 if (asmspec != 0) 936 { 937 int i; 938 939 /* Get rid of confusing prefixes. */ 940 asmspec = strip_reg_name (asmspec); 941 942 /* Allow a decimal number as a "register name". */ 943 for (i = strlen (asmspec) - 1; i >= 0; i--) 944 if (! ISDIGIT (asmspec[i])) 945 break; 946 if (asmspec[0] != 0 && i < 0) 947 { 948 i = atoi (asmspec); 949 if (i < FIRST_PSEUDO_REGISTER && i >= 0 && reg_names[i][0]) 950 return i; 951 else 952 return -2; 953 } 954 955 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 956 if (reg_names[i][0] 957 && ! strcmp (asmspec, strip_reg_name (reg_names[i]))) 958 return i; 959 960 #ifdef OVERLAPPING_REGISTER_NAMES 961 { 962 static const struct 963 { 964 const char *const name; 965 const int number; 966 const int nregs; 967 } table[] = OVERLAPPING_REGISTER_NAMES; 968 969 for (i = 0; i < (int) ARRAY_SIZE (table); i++) 970 if (table[i].name[0] 971 && ! 
strcmp (asmspec, table[i].name)) 972 { 973 *pnregs = table[i].nregs; 974 return table[i].number; 975 } 976 } 977 #endif /* OVERLAPPING_REGISTER_NAMES */ 978 979 #ifdef ADDITIONAL_REGISTER_NAMES 980 { 981 static const struct { const char *const name; const int number; } table[] 982 = ADDITIONAL_REGISTER_NAMES; 983 984 for (i = 0; i < (int) ARRAY_SIZE (table); i++) 985 if (table[i].name[0] 986 && ! strcmp (asmspec, table[i].name) 987 && reg_names[table[i].number][0]) 988 return table[i].number; 989 } 990 #endif /* ADDITIONAL_REGISTER_NAMES */ 991 992 if (!strcmp (asmspec, "memory")) 993 return -4; 994 995 if (!strcmp (asmspec, "cc")) 996 return -3; 997 998 return -2; 999 } 1000 1001 return -1; 1002 } 1003 1004 int 1005 decode_reg_name (const char *name) 1006 { 1007 int count; 1008 return decode_reg_name_and_count (name, &count); 1009 } 1010 1011 1012 /* Return true if DECL's initializer is suitable for a BSS section. */ 1013 1014 bool 1015 bss_initializer_p (const_tree decl, bool named) 1016 { 1017 /* Do not put non-common constants into the .bss section, they belong in 1018 a readonly section, except when NAMED is true. */ 1019 return ((!TREE_READONLY (decl) || DECL_COMMON (decl) || named) 1020 && (DECL_INITIAL (decl) == NULL 1021 /* In LTO we have no errors in program; error_mark_node is used 1022 to mark offlined constructors. */ 1023 || (DECL_INITIAL (decl) == error_mark_node 1024 && !in_lto_p) 1025 || (flag_zero_initialized_in_bss 1026 && initializer_zerop (DECL_INITIAL (decl))))); 1027 } 1028 1029 /* Compute the alignment of variable specified by DECL. 1030 DONT_OUTPUT_DATA is from assemble_variable. */ 1031 1032 void 1033 align_variable (tree decl, bool dont_output_data) 1034 { 1035 unsigned int align = DECL_ALIGN (decl); 1036 1037 /* In the case for initialing an array whose length isn't specified, 1038 where we have not yet been able to do the layout, 1039 figure out the proper alignment now. */ 1040 if (dont_output_data && DECL_SIZE (decl) == 0 1041 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE) 1042 align = MAX (align, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))); 1043 1044 /* Some object file formats have a maximum alignment which they support. 1045 In particular, a.out format supports a maximum alignment of 4. */ 1046 if (align > MAX_OFILE_ALIGNMENT) 1047 { 1048 error ("alignment of %q+D is greater than maximum object " 1049 "file alignment %d", decl, 1050 MAX_OFILE_ALIGNMENT/BITS_PER_UNIT); 1051 align = MAX_OFILE_ALIGNMENT; 1052 } 1053 1054 if (! DECL_USER_ALIGN (decl)) 1055 { 1056 #ifdef DATA_ABI_ALIGNMENT 1057 unsigned int data_abi_align 1058 = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align); 1059 /* For backwards compatibility, don't assume the ABI alignment for 1060 TLS variables. */ 1061 if (! DECL_THREAD_LOCAL_P (decl) || data_abi_align <= BITS_PER_WORD) 1062 align = data_abi_align; 1063 #endif 1064 1065 /* On some machines, it is good to increase alignment sometimes. 1066 But as DECL_ALIGN is used both for actually emitting the variable 1067 and for code accessing the variable as guaranteed alignment, we 1068 can only increase the alignment if it is a performance optimization 1069 if the references to it must bind to the current definition. */ 1070 if (decl_binds_to_current_def_p (decl) 1071 && !DECL_VIRTUAL_P (decl)) 1072 { 1073 #ifdef DATA_ALIGNMENT 1074 unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align); 1075 /* Don't increase alignment too much for TLS variables - TLS space 1076 is too precious. */ 1077 if (! 
DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD) 1078 align = data_align; 1079 #endif 1080 if (DECL_INITIAL (decl) != 0 1081 /* In LTO we have no errors in program; error_mark_node is used 1082 to mark offlined constructors. */ 1083 && (in_lto_p || DECL_INITIAL (decl) != error_mark_node)) 1084 { 1085 unsigned int const_align 1086 = targetm.constant_alignment (DECL_INITIAL (decl), align); 1087 /* Don't increase alignment too much for TLS variables - TLS 1088 space is too precious. */ 1089 if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD) 1090 align = const_align; 1091 } 1092 } 1093 } 1094 1095 /* Reset the alignment in case we have made it tighter, so we can benefit 1096 from it in get_pointer_alignment. */ 1097 SET_DECL_ALIGN (decl, align); 1098 } 1099 1100 /* Return DECL_ALIGN (decl), possibly increased for optimization purposes 1101 beyond what align_variable returned. */ 1102 1103 static unsigned int 1104 get_variable_align (tree decl) 1105 { 1106 unsigned int align = DECL_ALIGN (decl); 1107 1108 /* For user aligned vars or static vars align_variable already did 1109 everything. */ 1110 if (DECL_USER_ALIGN (decl) || !TREE_PUBLIC (decl)) 1111 return align; 1112 1113 #ifdef DATA_ABI_ALIGNMENT 1114 if (DECL_THREAD_LOCAL_P (decl)) 1115 align = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align); 1116 #endif 1117 1118 /* For decls that bind to the current definition, align_variable 1119 did also everything, except for not assuming ABI required alignment 1120 of TLS variables. For other vars, increase the alignment here 1121 as an optimization. */ 1122 if (!decl_binds_to_current_def_p (decl)) 1123 { 1124 /* On some machines, it is good to increase alignment sometimes. */ 1125 #ifdef DATA_ALIGNMENT 1126 unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align); 1127 /* Don't increase alignment too much for TLS variables - TLS space 1128 is too precious. */ 1129 if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD) 1130 align = data_align; 1131 #endif 1132 if (DECL_INITIAL (decl) != 0 1133 /* In LTO we have no errors in program; error_mark_node is used 1134 to mark offlined constructors. */ 1135 && (in_lto_p || DECL_INITIAL (decl) != error_mark_node)) 1136 { 1137 unsigned int const_align 1138 = targetm.constant_alignment (DECL_INITIAL (decl), align); 1139 /* Don't increase alignment too much for TLS variables - TLS space 1140 is too precious. */ 1141 if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD) 1142 align = const_align; 1143 } 1144 } 1145 1146 return align; 1147 } 1148 1149 /* Return the section into which the given VAR_DECL or CONST_DECL 1150 should be placed. PREFER_NOSWITCH_P is true if a noswitch 1151 section should be used wherever possible. */ 1152 1153 section * 1154 get_variable_section (tree decl, bool prefer_noswitch_p) 1155 { 1156 addr_space_t as = ADDR_SPACE_GENERIC; 1157 int reloc; 1158 varpool_node *vnode = varpool_node::get (decl); 1159 if (vnode) 1160 { 1161 vnode = vnode->ultimate_alias_target (); 1162 decl = vnode->decl; 1163 } 1164 1165 if (TREE_TYPE (decl) != error_mark_node) 1166 as = TYPE_ADDR_SPACE (TREE_TYPE (decl)); 1167 1168 /* We need the constructor to figure out reloc flag. */ 1169 if (vnode) 1170 vnode->get_constructor (); 1171 1172 if (DECL_COMMON (decl)) 1173 { 1174 /* If the decl has been given an explicit section name, or it resides 1175 in a non-generic address space, then it isn't common, and shouldn't 1176 be handled as such. 
*/ 1177 gcc_assert (DECL_SECTION_NAME (decl) == NULL 1178 && ADDR_SPACE_GENERIC_P (as)); 1179 if (DECL_THREAD_LOCAL_P (decl)) 1180 return tls_comm_section; 1181 else if (TREE_PUBLIC (decl) && bss_initializer_p (decl)) 1182 return comm_section; 1183 } 1184 1185 if (DECL_INITIAL (decl) == error_mark_node) 1186 reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0; 1187 else if (DECL_INITIAL (decl)) 1188 reloc = compute_reloc_for_constant (DECL_INITIAL (decl)); 1189 else 1190 reloc = 0; 1191 1192 resolve_unique_section (decl, reloc, flag_data_sections); 1193 if (IN_NAMED_SECTION (decl)) 1194 { 1195 section *sect = get_named_section (decl, NULL, reloc); 1196 1197 if ((sect->common.flags & SECTION_BSS) 1198 && !bss_initializer_p (decl, true)) 1199 { 1200 error_at (DECL_SOURCE_LOCATION (decl), 1201 "only zero initializers are allowed in section %qs", 1202 sect->named.name); 1203 DECL_INITIAL (decl) = error_mark_node; 1204 } 1205 return sect; 1206 } 1207 1208 if (ADDR_SPACE_GENERIC_P (as) 1209 && !DECL_THREAD_LOCAL_P (decl) 1210 && !(prefer_noswitch_p && targetm.have_switchable_bss_sections) 1211 && bss_initializer_p (decl)) 1212 { 1213 if (!TREE_PUBLIC (decl) 1214 && !((flag_sanitize & SANITIZE_ADDRESS) 1215 && asan_protect_global (decl))) 1216 return lcomm_section; 1217 if (bss_noswitch_section) 1218 return bss_noswitch_section; 1219 } 1220 1221 return targetm.asm_out.select_section (decl, reloc, 1222 get_variable_align (decl)); 1223 } 1224 1225 /* Return the block into which object_block DECL should be placed. */ 1226 1227 static struct object_block * 1228 get_block_for_decl (tree decl) 1229 { 1230 section *sect; 1231 1232 if (VAR_P (decl)) 1233 { 1234 /* The object must be defined in this translation unit. */ 1235 if (DECL_EXTERNAL (decl)) 1236 return NULL; 1237 1238 /* There's no point using object blocks for something that is 1239 isolated by definition. */ 1240 if (DECL_COMDAT_GROUP (decl)) 1241 return NULL; 1242 } 1243 1244 /* We can only calculate block offsets if the decl has a known 1245 constant size. */ 1246 if (DECL_SIZE_UNIT (decl) == NULL) 1247 return NULL; 1248 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl))) 1249 return NULL; 1250 1251 /* Find out which section should contain DECL. We cannot put it into 1252 an object block if it requires a standalone definition. */ 1253 if (VAR_P (decl)) 1254 align_variable (decl, 0); 1255 sect = get_variable_section (decl, true); 1256 if (SECTION_STYLE (sect) == SECTION_NOSWITCH) 1257 return NULL; 1258 1259 return get_block_for_section (sect); 1260 } 1261 1262 /* Make sure block symbol SYMBOL is in block BLOCK. */ 1263 1264 static void 1265 change_symbol_block (rtx symbol, struct object_block *block) 1266 { 1267 if (block != SYMBOL_REF_BLOCK (symbol)) 1268 { 1269 gcc_assert (SYMBOL_REF_BLOCK_OFFSET (symbol) < 0); 1270 SYMBOL_REF_BLOCK (symbol) = block; 1271 } 1272 } 1273 1274 /* Return true if it is possible to put DECL in an object_block. */ 1275 1276 static bool 1277 use_blocks_for_decl_p (tree decl) 1278 { 1279 struct symtab_node *snode; 1280 1281 /* Only data DECLs can be placed into object blocks. */ 1282 if (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL) 1283 return false; 1284 1285 /* DECL_INITIAL (decl) set to decl is a hack used for some decls that 1286 are never used from code directly and we never want object block handling 1287 for those. */ 1288 if (DECL_INITIAL (decl) == decl) 1289 return false; 1290 1291 /* If this decl is an alias, then we don't want to emit a 1292 definition. 
*/ 1293 if (VAR_P (decl) 1294 && (snode = symtab_node::get (decl)) != NULL 1295 && snode->alias) 1296 return false; 1297 1298 return targetm.use_blocks_for_decl_p (decl); 1299 } 1300 1301 /* Follow the IDENTIFIER_TRANSPARENT_ALIAS chain starting at *ALIAS 1302 until we find an identifier that is not itself a transparent alias. 1303 Modify the alias passed to it by reference (and all aliases on the 1304 way to the ultimate target), such that they do not have to be 1305 followed again, and return the ultimate target of the alias 1306 chain. */ 1307 1308 static inline tree 1309 ultimate_transparent_alias_target (tree *alias) 1310 { 1311 tree target = *alias; 1312 1313 if (IDENTIFIER_TRANSPARENT_ALIAS (target)) 1314 { 1315 gcc_assert (TREE_CHAIN (target)); 1316 target = ultimate_transparent_alias_target (&TREE_CHAIN (target)); 1317 gcc_assert (! IDENTIFIER_TRANSPARENT_ALIAS (target) 1318 && ! TREE_CHAIN (target)); 1319 *alias = target; 1320 } 1321 1322 return target; 1323 } 1324 1325 /* Create the DECL_RTL for a VAR_DECL or FUNCTION_DECL. DECL should 1326 have static storage duration. In other words, it should not be an 1327 automatic variable, including PARM_DECLs. 1328 1329 There is, however, one exception: this function handles variables 1330 explicitly placed in a particular register by the user. 1331 1332 This is never called for PARM_DECL nodes. */ 1333 1334 void 1335 make_decl_rtl (tree decl) 1336 { 1337 const char *name = 0; 1338 int reg_number; 1339 tree id; 1340 rtx x; 1341 1342 /* Check that we are not being given an automatic variable. */ 1343 gcc_assert (TREE_CODE (decl) != PARM_DECL 1344 && TREE_CODE (decl) != RESULT_DECL); 1345 1346 /* A weak alias has TREE_PUBLIC set but not the other bits. */ 1347 gcc_assert (!VAR_P (decl) 1348 || TREE_STATIC (decl) 1349 || TREE_PUBLIC (decl) 1350 || DECL_EXTERNAL (decl) 1351 || DECL_REGISTER (decl)); 1352 1353 /* And that we were not given a type or a label. */ 1354 gcc_assert (TREE_CODE (decl) != TYPE_DECL 1355 && TREE_CODE (decl) != LABEL_DECL); 1356 1357 /* For a duplicate declaration, we can be called twice on the 1358 same DECL node. Don't discard the RTL already made. */ 1359 if (DECL_RTL_SET_P (decl)) 1360 { 1361 /* If the old RTL had the wrong mode, fix the mode. */ 1362 x = DECL_RTL (decl); 1363 if (GET_MODE (x) != DECL_MODE (decl)) 1364 SET_DECL_RTL (decl, adjust_address_nv (x, DECL_MODE (decl), 0)); 1365 1366 if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl)) 1367 return; 1368 1369 /* ??? Another way to do this would be to maintain a hashed 1370 table of such critters. Instead of adding stuff to a DECL 1371 to give certain attributes to it, we could use an external 1372 hash map from DECL to set of attributes. */ 1373 1374 /* Let the target reassign the RTL if it wants. 1375 This is necessary, for example, when one machine specific 1376 decl attribute overrides another. */ 1377 targetm.encode_section_info (decl, DECL_RTL (decl), false); 1378 1379 /* If the symbol has a SYMBOL_REF_BLOCK field, update it based 1380 on the new decl information. */ 1381 if (MEM_P (x) 1382 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF 1383 && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (x, 0))) 1384 change_symbol_block (XEXP (x, 0), get_block_for_decl (decl)); 1385 1386 return; 1387 } 1388 1389 /* If this variable belongs to the global constant pool, retrieve the 1390 pre-computed RTL or recompute it in LTO mode. 
*/ 1391 if (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl)) 1392 { 1393 SET_DECL_RTL (decl, output_constant_def (DECL_INITIAL (decl), 1)); 1394 return; 1395 } 1396 1397 id = DECL_ASSEMBLER_NAME (decl); 1398 name = IDENTIFIER_POINTER (id); 1399 1400 if (name[0] != '*' && TREE_CODE (decl) != FUNCTION_DECL 1401 && DECL_REGISTER (decl)) 1402 { 1403 error ("register name not specified for %q+D", decl); 1404 } 1405 else if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl)) 1406 { 1407 const char *asmspec = name+1; 1408 machine_mode mode = DECL_MODE (decl); 1409 reg_number = decode_reg_name (asmspec); 1410 /* First detect errors in declaring global registers. */ 1411 if (reg_number == -1) 1412 error ("register name not specified for %q+D", decl); 1413 else if (reg_number < 0) 1414 error ("invalid register name for %q+D", decl); 1415 else if (mode == BLKmode) 1416 error ("data type of %q+D isn%'t suitable for a register", 1417 decl); 1418 else if (!in_hard_reg_set_p (accessible_reg_set, mode, reg_number)) 1419 error ("the register specified for %q+D cannot be accessed" 1420 " by the current target", decl); 1421 else if (!in_hard_reg_set_p (operand_reg_set, mode, reg_number)) 1422 error ("the register specified for %q+D is not general enough" 1423 " to be used as a register variable", decl); 1424 else if (!targetm.hard_regno_mode_ok (reg_number, mode)) 1425 error ("register specified for %q+D isn%'t suitable for data type", 1426 decl); 1427 /* Now handle properly declared static register variables. */ 1428 else 1429 { 1430 int nregs; 1431 1432 if (DECL_INITIAL (decl) != 0 && TREE_STATIC (decl)) 1433 { 1434 DECL_INITIAL (decl) = 0; 1435 error ("global register variable has initial value"); 1436 } 1437 if (TREE_THIS_VOLATILE (decl)) 1438 warning (OPT_Wvolatile_register_var, 1439 "optimization may eliminate reads and/or " 1440 "writes to register variables"); 1441 1442 /* If the user specified one of the eliminables registers here, 1443 e.g., FRAME_POINTER_REGNUM, we don't want to get this variable 1444 confused with that register and be eliminated. This usage is 1445 somewhat suspect... */ 1446 1447 SET_DECL_RTL (decl, gen_raw_REG (mode, reg_number)); 1448 ORIGINAL_REGNO (DECL_RTL (decl)) = reg_number; 1449 REG_USERVAR_P (DECL_RTL (decl)) = 1; 1450 1451 if (TREE_STATIC (decl)) 1452 { 1453 /* Make this register global, so not usable for anything 1454 else. */ 1455 #ifdef ASM_DECLARE_REGISTER_GLOBAL 1456 name = IDENTIFIER_POINTER (DECL_NAME (decl)); 1457 ASM_DECLARE_REGISTER_GLOBAL (asm_out_file, decl, reg_number, name); 1458 #endif 1459 nregs = hard_regno_nregs (reg_number, mode); 1460 while (nregs > 0) 1461 globalize_reg (decl, reg_number + --nregs); 1462 } 1463 1464 /* As a register variable, it has no section. */ 1465 return; 1466 } 1467 /* Avoid internal errors from invalid register 1468 specifications. */ 1469 SET_DECL_ASSEMBLER_NAME (decl, NULL_TREE); 1470 DECL_HARD_REGISTER (decl) = 0; 1471 /* Also avoid SSA inconsistencies by pretending this is an external 1472 decl now. */ 1473 DECL_EXTERNAL (decl) = 1; 1474 return; 1475 } 1476 /* Now handle ordinary static variables and functions (in memory). 1477 Also handle vars declared register invalidly. 
*/ 1478 else if (name[0] == '*') 1479 { 1480 #ifdef REGISTER_PREFIX 1481 if (strlen (REGISTER_PREFIX) != 0) 1482 { 1483 reg_number = decode_reg_name (name); 1484 if (reg_number >= 0 || reg_number == -3) 1485 error ("register name given for non-register variable %q+D", decl); 1486 } 1487 #endif 1488 } 1489 1490 /* Specifying a section attribute on a variable forces it into a 1491 non-.bss section, and thus it cannot be common. */ 1492 /* FIXME: In general this code should not be necessary because 1493 visibility pass is doing the same work. But notice_global_symbol 1494 is called early and it needs to make DECL_RTL to get the name. 1495 we take care of recomputing the DECL_RTL after visibility is changed. */ 1496 if (VAR_P (decl) 1497 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)) 1498 && DECL_SECTION_NAME (decl) != NULL 1499 && DECL_INITIAL (decl) == NULL_TREE 1500 && DECL_COMMON (decl)) 1501 DECL_COMMON (decl) = 0; 1502 1503 /* Variables can't be both common and weak. */ 1504 if (VAR_P (decl) && DECL_WEAK (decl)) 1505 DECL_COMMON (decl) = 0; 1506 1507 if (use_object_blocks_p () && use_blocks_for_decl_p (decl)) 1508 x = create_block_symbol (name, get_block_for_decl (decl), -1); 1509 else 1510 { 1511 machine_mode address_mode = Pmode; 1512 if (TREE_TYPE (decl) != error_mark_node) 1513 { 1514 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl)); 1515 address_mode = targetm.addr_space.address_mode (as); 1516 } 1517 x = gen_rtx_SYMBOL_REF (address_mode, name); 1518 } 1519 SYMBOL_REF_WEAK (x) = DECL_WEAK (decl); 1520 SET_SYMBOL_REF_DECL (x, decl); 1521 1522 x = gen_rtx_MEM (DECL_MODE (decl), x); 1523 if (TREE_CODE (decl) != FUNCTION_DECL) 1524 set_mem_attributes (x, decl, 1); 1525 SET_DECL_RTL (decl, x); 1526 1527 /* Optionally set flags or add text to the name to record information 1528 such as that it is a function name. 1529 If the name is changed, the macro ASM_OUTPUT_LABELREF 1530 will have to know how to strip this information. */ 1531 targetm.encode_section_info (decl, DECL_RTL (decl), true); 1532 } 1533 1534 /* Like make_decl_rtl, but inhibit creation of new alias sets when 1535 calling make_decl_rtl. Also, reset DECL_RTL before returning the 1536 rtl. */ 1537 1538 rtx 1539 make_decl_rtl_for_debug (tree decl) 1540 { 1541 unsigned int save_aliasing_flag; 1542 rtx rtl; 1543 1544 if (DECL_RTL_SET_P (decl)) 1545 return DECL_RTL (decl); 1546 1547 /* Kludge alert! Somewhere down the call chain, make_decl_rtl will 1548 call new_alias_set. If running with -fcompare-debug, sometimes 1549 we do not want to create alias sets that will throw the alias 1550 numbers off in the comparison dumps. So... clearing 1551 flag_strict_aliasing will keep new_alias_set() from creating a 1552 new set. */ 1553 save_aliasing_flag = flag_strict_aliasing; 1554 flag_strict_aliasing = 0; 1555 1556 rtl = DECL_RTL (decl); 1557 /* Reset DECL_RTL back, as various parts of the compiler expects 1558 DECL_RTL set meaning it is actually going to be output. */ 1559 SET_DECL_RTL (decl, NULL); 1560 1561 flag_strict_aliasing = save_aliasing_flag; 1562 return rtl; 1563 } 1564 1565 /* Output a string of literal assembler code 1566 for an `asm' keyword used between functions. */ 1567 1568 void 1569 assemble_asm (tree string) 1570 { 1571 const char *p; 1572 app_enable (); 1573 1574 if (TREE_CODE (string) == ADDR_EXPR) 1575 string = TREE_OPERAND (string, 0); 1576 1577 p = TREE_STRING_POINTER (string); 1578 fprintf (asm_out_file, "%s%s\n", p[0] == '\t' ? 
"" : "\t", p); 1579 } 1580 1581 /* Write the address of the entity given by SYMBOL to SEC. */ 1582 void 1583 assemble_addr_to_section (rtx symbol, section *sec) 1584 { 1585 switch_to_section (sec); 1586 assemble_align (POINTER_SIZE); 1587 assemble_integer (symbol, POINTER_SIZE_UNITS, POINTER_SIZE, 1); 1588 } 1589 1590 /* Return the numbered .ctors.N (if CONSTRUCTOR_P) or .dtors.N (if 1591 not) section for PRIORITY. */ 1592 section * 1593 get_cdtor_priority_section (int priority, bool constructor_p) 1594 { 1595 /* Buffer conservatively large enough for the full range of a 32-bit 1596 int plus the text below. */ 1597 char buf[18]; 1598 1599 /* ??? This only works reliably with the GNU linker. */ 1600 sprintf (buf, "%s.%.5u", 1601 constructor_p ? ".ctors" : ".dtors", 1602 /* Invert the numbering so the linker puts us in the proper 1603 order; constructors are run from right to left, and the 1604 linker sorts in increasing order. */ 1605 MAX_INIT_PRIORITY - priority); 1606 return get_section (buf, SECTION_WRITE, NULL); 1607 } 1608 1609 void 1610 default_named_section_asm_out_destructor (rtx symbol, int priority) 1611 { 1612 section *sec; 1613 1614 if (priority != DEFAULT_INIT_PRIORITY) 1615 sec = get_cdtor_priority_section (priority, 1616 /*constructor_p=*/false); 1617 else 1618 sec = get_section (".dtors", SECTION_WRITE, NULL); 1619 1620 assemble_addr_to_section (symbol, sec); 1621 } 1622 1623 #ifdef DTORS_SECTION_ASM_OP 1624 void 1625 default_dtor_section_asm_out_destructor (rtx symbol, 1626 int priority ATTRIBUTE_UNUSED) 1627 { 1628 assemble_addr_to_section (symbol, dtors_section); 1629 } 1630 #endif 1631 1632 void 1633 default_named_section_asm_out_constructor (rtx symbol, int priority) 1634 { 1635 section *sec; 1636 1637 if (priority != DEFAULT_INIT_PRIORITY) 1638 sec = get_cdtor_priority_section (priority, 1639 /*constructor_p=*/true); 1640 else 1641 sec = get_section (".ctors", SECTION_WRITE, NULL); 1642 1643 assemble_addr_to_section (symbol, sec); 1644 } 1645 1646 #ifdef CTORS_SECTION_ASM_OP 1647 void 1648 default_ctor_section_asm_out_constructor (rtx symbol, 1649 int priority ATTRIBUTE_UNUSED) 1650 { 1651 assemble_addr_to_section (symbol, ctors_section); 1652 } 1653 #endif 1654 1655 /* CONSTANT_POOL_BEFORE_FUNCTION may be defined as an expression with 1656 a nonzero value if the constant pool should be output before the 1657 start of the function, or a zero value if the pool should output 1658 after the end of the function. The default is to put it before the 1659 start. */ 1660 1661 #ifndef CONSTANT_POOL_BEFORE_FUNCTION 1662 #define CONSTANT_POOL_BEFORE_FUNCTION 1 1663 #endif 1664 1665 /* DECL is an object (either VAR_DECL or FUNCTION_DECL) which is going 1666 to be output to assembler. 1667 Set first_global_object_name and weak_global_object_name as appropriate. */ 1668 1669 void 1670 notice_global_symbol (tree decl) 1671 { 1672 const char **t = &first_global_object_name; 1673 1674 if (first_global_object_name 1675 || !TREE_PUBLIC (decl) 1676 || DECL_EXTERNAL (decl) 1677 || !DECL_NAME (decl) 1678 || (VAR_P (decl) && DECL_HARD_REGISTER (decl)) 1679 || (TREE_CODE (decl) != FUNCTION_DECL 1680 && (!VAR_P (decl) 1681 || (DECL_COMMON (decl) 1682 && (DECL_INITIAL (decl) == 0 1683 || DECL_INITIAL (decl) == error_mark_node))))) 1684 return; 1685 1686 /* We win when global object is found, but it is useful to know about weak 1687 symbol as well so we can produce nicer unique names. 
*/ 1688 if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl) || flag_shlib) 1689 t = &weak_global_object_name; 1690 1691 if (!*t) 1692 { 1693 tree id = DECL_ASSEMBLER_NAME (decl); 1694 ultimate_transparent_alias_target (&id); 1695 *t = ggc_strdup (targetm.strip_name_encoding (IDENTIFIER_POINTER (id))); 1696 } 1697 } 1698 1699 /* If not using flag_reorder_blocks_and_partition, decide early whether the 1700 current function goes into the cold section, so that targets can use 1701 current_function_section during RTL expansion. DECL describes the 1702 function. */ 1703 1704 void 1705 decide_function_section (tree decl) 1706 { 1707 first_function_block_is_cold = false; 1708 1709 if (DECL_SECTION_NAME (decl)) 1710 { 1711 struct cgraph_node *node = cgraph_node::get (current_function_decl); 1712 /* Calls to function_section rely on first_function_block_is_cold 1713 being accurate. */ 1714 first_function_block_is_cold = (node 1715 && node->frequency 1716 == NODE_FREQUENCY_UNLIKELY_EXECUTED); 1717 } 1718 1719 in_cold_section_p = first_function_block_is_cold; 1720 } 1721 1722 /* Get the function's name, as described by its RTL. This may be 1723 different from the DECL_NAME name used in the source file. */ 1724 const char * 1725 get_fnname_from_decl (tree decl) 1726 { 1727 rtx x = DECL_RTL (decl); 1728 gcc_assert (MEM_P (x)); 1729 x = XEXP (x, 0); 1730 gcc_assert (GET_CODE (x) == SYMBOL_REF); 1731 return XSTR (x, 0); 1732 } 1733 1734 /* Output assembler code for the constant pool of a function and associated 1735 with defining the name of the function. DECL describes the function. 1736 NAME is the function's name. For the constant pool, we use the current 1737 constant pool data. */ 1738 1739 void 1740 assemble_start_function (tree decl, const char *fnname) 1741 { 1742 int align; 1743 char tmp_label[100]; 1744 bool hot_label_written = false; 1745 1746 if (crtl->has_bb_partition) 1747 { 1748 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB", const_labelno); 1749 crtl->subsections.hot_section_label = ggc_strdup (tmp_label); 1750 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDB", const_labelno); 1751 crtl->subsections.cold_section_label = ggc_strdup (tmp_label); 1752 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTE", const_labelno); 1753 crtl->subsections.hot_section_end_label = ggc_strdup (tmp_label); 1754 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDE", const_labelno); 1755 crtl->subsections.cold_section_end_label = ggc_strdup (tmp_label); 1756 const_labelno++; 1757 cold_function_name = NULL_TREE; 1758 } 1759 else 1760 { 1761 crtl->subsections.hot_section_label = NULL; 1762 crtl->subsections.cold_section_label = NULL; 1763 crtl->subsections.hot_section_end_label = NULL; 1764 crtl->subsections.cold_section_end_label = NULL; 1765 } 1766 1767 /* The following code does not need preprocessing in the assembler. */ 1768 1769 app_disable (); 1770 1771 if (CONSTANT_POOL_BEFORE_FUNCTION) 1772 output_constant_pool (fnname, decl); 1773 1774 align = symtab_node::get (decl)->definition_alignment (); 1775 1776 /* Make sure the not and cold text (code) sections are properly 1777 aligned. This is necessary here in the case where the function 1778 has both hot and cold sections, because we don't want to re-set 1779 the alignment when the section switch happens mid-function. 
*/ 1780 1781 if (crtl->has_bb_partition) 1782 { 1783 first_function_block_is_cold = false; 1784 1785 switch_to_section (unlikely_text_section ()); 1786 assemble_align (align); 1787 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label); 1788 1789 /* When the function starts with a cold section, we need to explicitly 1790 align the hot section and write out the hot section label. 1791 But if the current function is a thunk, we do not have a CFG. */ 1792 if (!cfun->is_thunk 1793 && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION) 1794 { 1795 switch_to_section (text_section); 1796 assemble_align (align); 1797 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label); 1798 hot_label_written = true; 1799 first_function_block_is_cold = true; 1800 } 1801 in_cold_section_p = first_function_block_is_cold; 1802 } 1803 1804 1805 /* Switch to the correct text section for the start of the function. */ 1806 1807 switch_to_section (function_section (decl)); 1808 if (crtl->has_bb_partition && !hot_label_written) 1809 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label); 1810 1811 /* Tell assembler to move to target machine's alignment for functions. */ 1812 align = floor_log2 (align / BITS_PER_UNIT); 1813 if (align > 0) 1814 { 1815 ASM_OUTPUT_ALIGN (asm_out_file, align); 1816 } 1817 1818 /* Handle a user-specified function alignment. 1819 Note that we still need to align to DECL_ALIGN, as above, 1820 because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all. */ 1821 if (! DECL_USER_ALIGN (decl) 1822 && align_functions.levels[0].log > align 1823 && optimize_function_for_speed_p (cfun)) 1824 { 1825 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN 1826 int align_log = align_functions.levels[0].log; 1827 #endif 1828 int max_skip = align_functions.levels[0].maxskip; 1829 if (flag_limit_function_alignment && crtl->max_insn_address > 0 1830 && max_skip >= crtl->max_insn_address) 1831 max_skip = crtl->max_insn_address - 1; 1832 1833 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN 1834 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, align_log, max_skip); 1835 if (max_skip == align_functions.levels[0].maxskip) 1836 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, 1837 align_functions.levels[1].log, 1838 align_functions.levels[1].maxskip); 1839 #else 1840 ASM_OUTPUT_ALIGN (asm_out_file, align_functions.levels[0].log); 1841 #endif 1842 } 1843 1844 #ifdef ASM_OUTPUT_FUNCTION_PREFIX 1845 ASM_OUTPUT_FUNCTION_PREFIX (asm_out_file, fnname); 1846 #endif 1847 1848 if (!DECL_IGNORED_P (decl)) 1849 (*debug_hooks->begin_function) (decl); 1850 1851 /* Make function name accessible from other files, if appropriate. 
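   Concretely, for a TREE_PUBLIC function this amounts to a directive such
   as ".globl fnname" emitted via globalize_decl, and possibly a visibility
   directive such as ".hidden fnname" via maybe_assemble_visibility; the
   exact spellings are target-dependent.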
*/ 1852 1853 if (TREE_PUBLIC (decl)) 1854 { 1855 notice_global_symbol (decl); 1856 1857 globalize_decl (decl); 1858 1859 maybe_assemble_visibility (decl); 1860 } 1861 1862 if (DECL_PRESERVE_P (decl)) 1863 targetm.asm_out.mark_decl_preserved (fnname); 1864 1865 unsigned HOST_WIDE_INT patch_area_size = function_entry_patch_area_size; 1866 unsigned HOST_WIDE_INT patch_area_entry = function_entry_patch_area_start; 1867 1868 tree patchable_function_entry_attr 1869 = lookup_attribute ("patchable_function_entry", DECL_ATTRIBUTES (decl)); 1870 if (patchable_function_entry_attr) 1871 { 1872 tree pp_val = TREE_VALUE (patchable_function_entry_attr); 1873 tree patchable_function_entry_value1 = TREE_VALUE (pp_val); 1874 1875 patch_area_size = tree_to_uhwi (patchable_function_entry_value1); 1876 patch_area_entry = 0; 1877 if (TREE_CHAIN (pp_val) != NULL_TREE) 1878 { 1879 tree patchable_function_entry_value2 1880 = TREE_VALUE (TREE_CHAIN (pp_val)); 1881 patch_area_entry = tree_to_uhwi (patchable_function_entry_value2); 1882 } 1883 } 1884 1885 if (patch_area_entry > patch_area_size) 1886 { 1887 if (patch_area_size > 0) 1888 warning (OPT_Wattributes, 1889 "patchable function entry %wu exceeds size %wu", 1890 patch_area_entry, patch_area_size); 1891 patch_area_entry = 0; 1892 } 1893 1894 /* Emit the patching area before the entry label, if any. */ 1895 if (patch_area_entry > 0) 1896 targetm.asm_out.print_patchable_function_entry (asm_out_file, 1897 patch_area_entry, true); 1898 1899 /* Do any machine/system dependent processing of the function name. */ 1900 #ifdef ASM_DECLARE_FUNCTION_NAME 1901 ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl); 1902 #else 1903 /* Standard thing is just output label for the function. */ 1904 ASM_OUTPUT_FUNCTION_LABEL (asm_out_file, fnname, current_function_decl); 1905 #endif /* ASM_DECLARE_FUNCTION_NAME */ 1906 1907 /* And the area after the label. Record it if we haven't done so yet. */ 1908 if (patch_area_size > patch_area_entry) 1909 targetm.asm_out.print_patchable_function_entry (asm_out_file, 1910 patch_area_size 1911 - patch_area_entry, 1912 patch_area_entry == 0); 1913 1914 if (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (decl))) 1915 saw_no_split_stack = true; 1916 } 1917 1918 /* Output assembler code associated with defining the size of the 1919 function. DECL describes the function. NAME is the function's name. */ 1920 1921 void 1922 assemble_end_function (tree decl, const char *fnname ATTRIBUTE_UNUSED) 1923 { 1924 #ifdef ASM_DECLARE_FUNCTION_SIZE 1925 /* We could have switched section in the middle of the function. */ 1926 if (crtl->has_bb_partition) 1927 switch_to_section (function_section (decl)); 1928 ASM_DECLARE_FUNCTION_SIZE (asm_out_file, fnname, decl); 1929 #endif 1930 if (! CONSTANT_POOL_BEFORE_FUNCTION) 1931 { 1932 output_constant_pool (fnname, decl); 1933 switch_to_section (function_section (decl)); /* need to switch back */ 1934 } 1935 /* Output labels for end of hot/cold text sections (to be used by 1936 debug info.) 
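   These are the LHOTE/LCOLDE internal labels generated in
   assemble_start_function; with them the debug back end can describe the
   function as two disjoint address ranges (roughly [.LHOTB0, .LHOTE0) and
   [.LCOLDB0, .LCOLDE0) on ELF) instead of a single low/high PC pair.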
*/ 1937 if (crtl->has_bb_partition) 1938 { 1939 section *save_text_section; 1940 1941 save_text_section = in_section; 1942 switch_to_section (unlikely_text_section ()); 1943 #ifdef ASM_DECLARE_COLD_FUNCTION_SIZE 1944 if (cold_function_name != NULL_TREE) 1945 ASM_DECLARE_COLD_FUNCTION_SIZE (asm_out_file, 1946 IDENTIFIER_POINTER (cold_function_name), 1947 decl); 1948 #endif 1949 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_end_label); 1950 if (first_function_block_is_cold) 1951 switch_to_section (text_section); 1952 else 1953 switch_to_section (function_section (decl)); 1954 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_end_label); 1955 switch_to_section (save_text_section); 1956 } 1957 } 1958 1959 /* Assemble code to leave SIZE bytes of zeros. */ 1960 1961 void 1962 assemble_zeros (unsigned HOST_WIDE_INT size) 1963 { 1964 /* Do no output if -fsyntax-only. */ 1965 if (flag_syntax_only) 1966 return; 1967 1968 #ifdef ASM_NO_SKIP_IN_TEXT 1969 /* The `space' pseudo in the text section outputs nop insns rather than 0s, 1970 so we must output 0s explicitly in the text section. */ 1971 if (ASM_NO_SKIP_IN_TEXT && (in_section->common.flags & SECTION_CODE) != 0) 1972 { 1973 unsigned HOST_WIDE_INT i; 1974 for (i = 0; i < size; i++) 1975 assemble_integer (const0_rtx, 1, BITS_PER_UNIT, 1); 1976 } 1977 else 1978 #endif 1979 if (size > 0) 1980 ASM_OUTPUT_SKIP (asm_out_file, size); 1981 } 1982 1983 /* Assemble an alignment pseudo op for an ALIGN-bit boundary. */ 1984 1985 void 1986 assemble_align (unsigned int align) 1987 { 1988 if (align > BITS_PER_UNIT) 1989 { 1990 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT)); 1991 } 1992 } 1993 1994 /* Assemble a string constant with the specified C string as contents. */ 1995 1996 void 1997 assemble_string (const char *p, int size) 1998 { 1999 int pos = 0; 2000 int maximum = 2000; 2001 2002 /* If the string is very long, split it up. */ 2003 2004 while (pos < size) 2005 { 2006 int thissize = size - pos; 2007 if (thissize > maximum) 2008 thissize = maximum; 2009 2010 ASM_OUTPUT_ASCII (asm_out_file, p, thissize); 2011 2012 pos += thissize; 2013 p += thissize; 2014 } 2015 } 2016 2017 2018 /* A noswitch_section_callback for lcomm_section. */ 2019 2020 static bool 2021 emit_local (tree decl ATTRIBUTE_UNUSED, 2022 const char *name ATTRIBUTE_UNUSED, 2023 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED, 2024 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED) 2025 { 2026 #if defined ASM_OUTPUT_ALIGNED_DECL_LOCAL 2027 unsigned int align = symtab_node::get (decl)->definition_alignment (); 2028 ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, decl, name, 2029 size, align); 2030 return true; 2031 #elif defined ASM_OUTPUT_ALIGNED_LOCAL 2032 unsigned int align = symtab_node::get (decl)->definition_alignment (); 2033 ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, align); 2034 return true; 2035 #else 2036 ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded); 2037 return false; 2038 #endif 2039 } 2040 2041 /* A noswitch_section_callback for bss_noswitch_section. */ 2042 2043 #if defined ASM_OUTPUT_ALIGNED_BSS 2044 static bool 2045 emit_bss (tree decl ATTRIBUTE_UNUSED, 2046 const char *name ATTRIBUTE_UNUSED, 2047 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED, 2048 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED) 2049 { 2050 ASM_OUTPUT_ALIGNED_BSS (asm_out_file, decl, name, size, 2051 get_variable_align (decl)); 2052 return true; 2053 } 2054 #endif 2055 2056 /* A noswitch_section_callback for comm_section. 
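   On a typical ELF/GAS target the result is a line such as
   "\t.comm\tname,size,align", asking the linker to allocate the object in
   common storage; which of the ASM_OUTPUT_*COMMON variants below is used
   depends on what the target defines.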
*/ 2057 2058 static bool 2059 emit_common (tree decl ATTRIBUTE_UNUSED, 2060 const char *name ATTRIBUTE_UNUSED, 2061 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED, 2062 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED) 2063 { 2064 #if defined ASM_OUTPUT_ALIGNED_DECL_COMMON 2065 ASM_OUTPUT_ALIGNED_DECL_COMMON (asm_out_file, decl, name, 2066 size, get_variable_align (decl)); 2067 return true; 2068 #elif defined ASM_OUTPUT_ALIGNED_COMMON 2069 ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size, 2070 get_variable_align (decl)); 2071 return true; 2072 #else 2073 ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded); 2074 return false; 2075 #endif 2076 } 2077 2078 /* A noswitch_section_callback for tls_comm_section. */ 2079 2080 static bool 2081 emit_tls_common (tree decl ATTRIBUTE_UNUSED, 2082 const char *name ATTRIBUTE_UNUSED, 2083 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED, 2084 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED) 2085 { 2086 #ifdef ASM_OUTPUT_TLS_COMMON 2087 ASM_OUTPUT_TLS_COMMON (asm_out_file, decl, name, size); 2088 return true; 2089 #else 2090 sorry ("thread-local COMMON data not implemented"); 2091 return true; 2092 #endif 2093 } 2094 2095 /* Assemble DECL given that it belongs in SECTION_NOSWITCH section SECT. 2096 NAME is the name of DECL's SYMBOL_REF. */ 2097 2098 static void 2099 assemble_noswitch_variable (tree decl, const char *name, section *sect, 2100 unsigned int align) 2101 { 2102 unsigned HOST_WIDE_INT size, rounded; 2103 2104 size = tree_to_uhwi (DECL_SIZE_UNIT (decl)); 2105 rounded = size; 2106 2107 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl)) 2108 size += asan_red_zone_size (size); 2109 2110 /* Don't allocate zero bytes of common, 2111 since that means "undefined external" in the linker. */ 2112 if (size == 0) 2113 rounded = 1; 2114 2115 /* Round size up to multiple of BIGGEST_ALIGNMENT bits 2116 so that each uninitialized object starts on such a boundary. */ 2117 rounded += (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1; 2118 rounded = (rounded / (BIGGEST_ALIGNMENT / BITS_PER_UNIT) 2119 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT)); 2120 2121 if (!sect->noswitch.callback (decl, name, size, rounded) 2122 && (unsigned HOST_WIDE_INT) (align / BITS_PER_UNIT) > rounded) 2123 error ("requested alignment for %q+D is greater than " 2124 "implemented alignment of %wu", decl, rounded); 2125 } 2126 2127 /* A subroutine of assemble_variable. Output the label and contents of 2128 DECL, whose address is a SYMBOL_REF with name NAME. DONT_OUTPUT_DATA 2129 is as for assemble_variable. */ 2130 2131 static void 2132 assemble_variable_contents (tree decl, const char *name, 2133 bool dont_output_data, bool merge_strings) 2134 { 2135 /* Do any machine/system dependent processing of the object. */ 2136 #ifdef ASM_DECLARE_OBJECT_NAME 2137 last_assemble_variable_decl = decl; 2138 ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl); 2139 #else 2140 /* Standard thing is just output label for the object. */ 2141 ASM_OUTPUT_LABEL (asm_out_file, name); 2142 #endif /* ASM_DECLARE_OBJECT_NAME */ 2143 2144 if (!dont_output_data) 2145 { 2146 /* Caller is supposed to use varpool_get_constructor when it wants 2147 to output the body. */ 2148 gcc_assert (!in_lto_p || DECL_INITIAL (decl) != error_mark_node); 2149 if (DECL_INITIAL (decl) 2150 && DECL_INITIAL (decl) != error_mark_node 2151 && !initializer_zerop (DECL_INITIAL (decl))) 2152 /* Output the actual data. 
*/ 2153 output_constant (DECL_INITIAL (decl), 2154 tree_to_uhwi (DECL_SIZE_UNIT (decl)), 2155 get_variable_align (decl), 2156 false, merge_strings); 2157 else 2158 /* Leave space for it. */ 2159 assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl))); 2160 targetm.asm_out.decl_end (); 2161 } 2162 } 2163 2164 /* Write out assembly for the variable DECL, which is not defined in 2165 the current translation unit. */ 2166 void 2167 assemble_undefined_decl (tree decl) 2168 { 2169 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0); 2170 targetm.asm_out.assemble_undefined_decl (asm_out_file, name, decl); 2171 } 2172 2173 /* Assemble everything that is needed for a variable or function declaration. 2174 Not used for automatic variables, and not used for function definitions. 2175 Should not be called for variables of incomplete structure type. 2176 2177 TOP_LEVEL is nonzero if this variable has file scope. 2178 AT_END is nonzero if this is the special handling, at end of compilation, 2179 to define things that have had only tentative definitions. 2180 DONT_OUTPUT_DATA if nonzero means don't actually output the 2181 initial value (that will be done by the caller). */ 2182 2183 void 2184 assemble_variable (tree decl, int top_level ATTRIBUTE_UNUSED, 2185 int at_end ATTRIBUTE_UNUSED, int dont_output_data) 2186 { 2187 const char *name; 2188 rtx decl_rtl, symbol; 2189 section *sect; 2190 unsigned int align; 2191 bool asan_protected = false; 2192 2193 /* This function is supposed to handle VARIABLES. Ensure we have one. */ 2194 gcc_assert (VAR_P (decl)); 2195 2196 /* Emulated TLS had better not get this far. */ 2197 gcc_checking_assert (targetm.have_tls || !DECL_THREAD_LOCAL_P (decl)); 2198 2199 last_assemble_variable_decl = 0; 2200 2201 /* Normally no need to say anything here for external references, 2202 since assemble_external is called by the language-specific code 2203 when a declaration is first seen. */ 2204 2205 if (DECL_EXTERNAL (decl)) 2206 return; 2207 2208 /* Do nothing for global register variables. */ 2209 if (DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl))) 2210 { 2211 TREE_ASM_WRITTEN (decl) = 1; 2212 return; 2213 } 2214 2215 /* If type was incomplete when the variable was declared, 2216 see if it is complete now. */ 2217 2218 if (DECL_SIZE (decl) == 0) 2219 layout_decl (decl, 0); 2220 2221 /* Still incomplete => don't allocate it; treat the tentative defn 2222 (which is what it must have been) as an `extern' reference. */ 2223 2224 if (!dont_output_data && DECL_SIZE (decl) == 0) 2225 { 2226 error ("storage size of %q+D isn%'t known", decl); 2227 TREE_ASM_WRITTEN (decl) = 1; 2228 return; 2229 } 2230 2231 /* The first declaration of a variable that comes through this function 2232 decides whether it is global (in C, has external linkage) 2233 or local (in C, has internal linkage). So do nothing more 2234 if this function has already run. */ 2235 2236 if (TREE_ASM_WRITTEN (decl)) 2237 return; 2238 2239 /* Make sure targetm.encode_section_info is invoked before we set 2240 ASM_WRITTEN. */ 2241 decl_rtl = DECL_RTL (decl); 2242 2243 TREE_ASM_WRITTEN (decl) = 1; 2244 2245 /* Do no output if -fsyntax-only. */ 2246 if (flag_syntax_only) 2247 return; 2248 2249 if (! dont_output_data 2250 && ! 
valid_constant_size_p (DECL_SIZE_UNIT (decl))) 2251 { 2252 error ("size of variable %q+D is too large", decl); 2253 return; 2254 } 2255 2256 gcc_assert (MEM_P (decl_rtl)); 2257 gcc_assert (GET_CODE (XEXP (decl_rtl, 0)) == SYMBOL_REF); 2258 symbol = XEXP (decl_rtl, 0); 2259 2260 /* If this symbol belongs to the tree constant pool, output the constant 2261 if it hasn't already been written. */ 2262 if (TREE_CONSTANT_POOL_ADDRESS_P (symbol)) 2263 { 2264 tree decl = SYMBOL_REF_DECL (symbol); 2265 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl))) 2266 output_constant_def_contents (symbol); 2267 return; 2268 } 2269 2270 app_disable (); 2271 2272 name = XSTR (symbol, 0); 2273 if (TREE_PUBLIC (decl) && DECL_NAME (decl)) 2274 notice_global_symbol (decl); 2275 2276 /* Compute the alignment of this data. */ 2277 2278 align_variable (decl, dont_output_data); 2279 2280 if ((flag_sanitize & SANITIZE_ADDRESS) 2281 && asan_protect_global (decl)) 2282 { 2283 asan_protected = true; 2284 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl), 2285 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT)); 2286 } 2287 2288 set_mem_align (decl_rtl, DECL_ALIGN (decl)); 2289 2290 align = get_variable_align (decl); 2291 2292 if (TREE_PUBLIC (decl)) 2293 maybe_assemble_visibility (decl); 2294 2295 if (DECL_PRESERVE_P (decl)) 2296 targetm.asm_out.mark_decl_preserved (name); 2297 2298 /* First make the assembler name(s) global if appropriate. */ 2299 sect = get_variable_section (decl, false); 2300 if (TREE_PUBLIC (decl) 2301 && (sect->common.flags & SECTION_COMMON) == 0) 2302 globalize_decl (decl); 2303 2304 /* Output any data that we will need to use the address of. */ 2305 if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node) 2306 output_addressed_constants (DECL_INITIAL (decl)); 2307 2308 /* dbxout.c needs to know this. */ 2309 if (sect && (sect->common.flags & SECTION_CODE) != 0) 2310 DECL_IN_TEXT_SECTION (decl) = 1; 2311 2312 /* If the decl is part of an object_block, make sure that the decl 2313 has been positioned within its block, but do not write out its 2314 definition yet. output_object_blocks will do that later. */ 2315 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol)) 2316 { 2317 gcc_assert (!dont_output_data); 2318 place_block_symbol (symbol); 2319 } 2320 else if (SECTION_STYLE (sect) == SECTION_NOSWITCH) 2321 assemble_noswitch_variable (decl, name, sect, align); 2322 else 2323 { 2324 /* Special-case handling of vtv comdat sections. */ 2325 if (SECTION_STYLE (sect) == SECTION_NAMED 2326 && (strcmp (sect->named.name, ".vtable_map_vars") == 0)) 2327 handle_vtv_comdat_section (sect, decl); 2328 else 2329 switch_to_section (sect); 2330 if (align > BITS_PER_UNIT) 2331 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT)); 2332 assemble_variable_contents (decl, name, dont_output_data, 2333 (sect->common.flags & SECTION_MERGE) 2334 && (sect->common.flags & SECTION_STRINGS)); 2335 if (asan_protected) 2336 { 2337 unsigned HOST_WIDE_INT int size 2338 = tree_to_uhwi (DECL_SIZE_UNIT (decl)); 2339 assemble_zeros (asan_red_zone_size (size)); 2340 } 2341 } 2342 } 2343 2344 2345 /* Given a function declaration (FN_DECL), this function assembles the 2346 function into the .preinit_array section. 
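   More precisely, it emits the function's address as one pointer-sized
   element of that array (roughly ".section .preinit_array" followed by
   ".quad fn" on a 64-bit ELF target), so the dynamic loader runs it before
   ordinary constructors; this is used by the vtable verification runtime.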
*/ 2347 2348 void 2349 assemble_vtv_preinit_initializer (tree fn_decl) 2350 { 2351 section *sect; 2352 unsigned flags = SECTION_WRITE; 2353 rtx symbol = XEXP (DECL_RTL (fn_decl), 0); 2354 2355 flags |= SECTION_NOTYPE; 2356 sect = get_section (".preinit_array", flags, fn_decl); 2357 switch_to_section (sect); 2358 assemble_addr_to_section (symbol, sect); 2359 } 2360 2361 /* Return 1 if type TYPE contains any pointers. */ 2362 2363 static int 2364 contains_pointers_p (tree type) 2365 { 2366 switch (TREE_CODE (type)) 2367 { 2368 case POINTER_TYPE: 2369 case REFERENCE_TYPE: 2370 /* I'm not sure whether OFFSET_TYPE needs this treatment, 2371 so I'll play safe and return 1. */ 2372 case OFFSET_TYPE: 2373 return 1; 2374 2375 case RECORD_TYPE: 2376 case UNION_TYPE: 2377 case QUAL_UNION_TYPE: 2378 { 2379 tree fields; 2380 /* For a type that has fields, see if the fields have pointers. */ 2381 for (fields = TYPE_FIELDS (type); fields; fields = DECL_CHAIN (fields)) 2382 if (TREE_CODE (fields) == FIELD_DECL 2383 && contains_pointers_p (TREE_TYPE (fields))) 2384 return 1; 2385 return 0; 2386 } 2387 2388 case ARRAY_TYPE: 2389 /* An array type contains pointers if its element type does. */ 2390 return contains_pointers_p (TREE_TYPE (type)); 2391 2392 default: 2393 return 0; 2394 } 2395 } 2396 2397 /* We delay assemble_external processing until 2398 the compilation unit is finalized. This is the best we can do for 2399 right now (i.e. stage 3 of GCC 4.0) - the right thing is to delay 2400 it all the way to final. See PR 17982 for further discussion. */ 2401 static GTY(()) tree pending_assemble_externals; 2402 2403 #ifdef ASM_OUTPUT_EXTERNAL 2404 /* Some targets delay some output to final using TARGET_ASM_FILE_END. 2405 As a result, assemble_external can be called after the list of externals 2406 is processed and the pointer set destroyed. */ 2407 static bool pending_assemble_externals_processed; 2408 2409 /* Avoid O(external_decls**2) lookups in the pending_assemble_externals 2410 TREE_LIST in assemble_external. */ 2411 static hash_set<tree> *pending_assemble_externals_set; 2412 2413 /* True if DECL is a function decl for which no out-of-line copy exists. 2414 It is assumed that DECL's assembler name has been set. */ 2415 2416 static bool 2417 incorporeal_function_p (tree decl) 2418 { 2419 if (TREE_CODE (decl) == FUNCTION_DECL && fndecl_built_in_p (decl)) 2420 { 2421 const char *name; 2422 2423 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL 2424 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl))) 2425 return true; 2426 2427 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)); 2428 /* Atomic or sync builtins which have survived this far will be 2429 resolved externally and therefore are not incorporeal. */ 2430 if (strncmp (name, "__builtin_", 10) == 0) 2431 return true; 2432 } 2433 return false; 2434 } 2435 2436 /* Actually do the tests to determine if this is necessary, and invoke 2437 ASM_OUTPUT_EXTERNAL. */ 2438 static void 2439 assemble_external_real (tree decl) 2440 { 2441 rtx rtl = DECL_RTL (decl); 2442 2443 if (MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF 2444 && !SYMBOL_REF_USED (XEXP (rtl, 0)) 2445 && !incorporeal_function_p (decl)) 2446 { 2447 /* Some systems do require some output. 
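   The XCOFF assembler on AIX, for example, wants an explicit declaration
   (an ".extern foo" style line) for referenced externals, which is what
   ASM_OUTPUT_EXTERNAL supplies on the targets that define it.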
*/ 2448 SYMBOL_REF_USED (XEXP (rtl, 0)) = 1; 2449 ASM_OUTPUT_EXTERNAL (asm_out_file, decl, XSTR (XEXP (rtl, 0), 0)); 2450 } 2451 } 2452 #endif 2453 2454 void 2455 process_pending_assemble_externals (void) 2456 { 2457 #ifdef ASM_OUTPUT_EXTERNAL 2458 tree list; 2459 for (list = pending_assemble_externals; list; list = TREE_CHAIN (list)) 2460 assemble_external_real (TREE_VALUE (list)); 2461 2462 pending_assemble_externals = 0; 2463 pending_assemble_externals_processed = true; 2464 delete pending_assemble_externals_set; 2465 #endif 2466 } 2467 2468 /* This TREE_LIST contains any weak symbol declarations waiting 2469 to be emitted. */ 2470 static GTY(()) tree weak_decls; 2471 2472 /* Output something to declare an external symbol to the assembler, 2473 and qualifiers such as weakness. (Most assemblers don't need an 2474 extern declaration, so we normally output nothing.) Do nothing if 2475 DECL is not external. */ 2476 2477 void 2478 assemble_external (tree decl ATTRIBUTE_UNUSED) 2479 { 2480 /* Make sure that the ASM_OUT_FILE is open. 2481 If it's not, we should not be calling this function. */ 2482 gcc_assert (asm_out_file); 2483 2484 /* In a perfect world, the following condition would be true. 2485 Sadly, the Go front end emits assembly *from the front end*, 2486 bypassing the call graph. See PR52739. Fix before GCC 4.8. */ 2487 #if 0 2488 /* This function should only be called if we are expanding, or have 2489 expanded, to RTL. 2490 Ideally, only final.c would be calling this function, but it is 2491 not clear whether that would break things somehow. See PR 17982 2492 for further discussion. */ 2493 gcc_assert (state == EXPANSION 2494 || state == FINISHED); 2495 #endif 2496 2497 if (!DECL_P (decl) || !DECL_EXTERNAL (decl) || !TREE_PUBLIC (decl)) 2498 return; 2499 2500 /* We want to output the annotation for weak and external symbols at 2501 the very end, to check whether they are referenced or not. */ 2502 2503 if (TARGET_SUPPORTS_WEAK 2504 && DECL_WEAK (decl) 2505 /* TREE_STATIC is a weird and abused creature which is not 2506 generally the right test for whether an entity has been 2507 locally emitted, inlined or otherwise not-really-extern, but 2508 for declarations that can be weak, it happens to 2509 match. */ 2510 && !TREE_STATIC (decl) 2511 && lookup_attribute ("weak", DECL_ATTRIBUTES (decl)) 2512 && value_member (decl, weak_decls) == NULL_TREE) 2513 weak_decls = tree_cons (NULL, decl, weak_decls); 2514 2515 #ifdef ASM_OUTPUT_EXTERNAL 2516 if (pending_assemble_externals_processed) 2517 { 2518 assemble_external_real (decl); 2519 return; 2520 } 2521 2522 if (! pending_assemble_externals_set->add (decl)) 2523 pending_assemble_externals = tree_cons (NULL, decl, 2524 pending_assemble_externals); 2525 #endif 2526 } 2527 2528 /* Similar, for calling a library function FUN. */ 2529 2530 void 2531 assemble_external_libcall (rtx fun) 2532 { 2533 /* Declare the library function name external when first used, if necessary. */ 2534 if (! SYMBOL_REF_USED (fun)) 2535 { 2536 SYMBOL_REF_USED (fun) = 1; 2537 targetm.asm_out.external_libcall (fun); 2538 } 2539 } 2540 2541 /* Assemble a label named NAME. */ 2542 2543 void 2544 assemble_label (FILE *file, const char *name) 2545 { 2546 ASM_OUTPUT_LABEL (file, name); 2547 } 2548 2549 /* Set the symbol_referenced flag for ID. */ 2550 void 2551 mark_referenced (tree id) 2552 { 2553 TREE_SYMBOL_REFERENCED (id) = 1; 2554 } 2555 2556 /* Set the symbol_referenced flag for DECL and notify callgraph.
*/ 2557 void 2558 mark_decl_referenced (tree decl) 2559 { 2560 if (TREE_CODE (decl) == FUNCTION_DECL) 2561 { 2562 /* Extern inline functions don't become needed when referenced. 2563 If we know a method will be emitted in another TU and no new 2564 functions can be marked reachable, just use the external 2565 definition. */ 2566 struct cgraph_node *node = cgraph_node::get_create (decl); 2567 if (!DECL_EXTERNAL (decl) 2568 && !node->definition) 2569 node->mark_force_output (); 2570 } 2571 else if (VAR_P (decl)) 2572 { 2573 varpool_node *node = varpool_node::get_create (decl); 2574 /* The C++ front end uses mark_decl_referenced to force COMDAT variables 2575 to be output that might otherwise appear dead. */ 2576 node->force_output = true; 2577 } 2578 /* else do nothing - we can get various sorts of CST nodes here, 2579 which do not need to be marked. */ 2580 } 2581 2582 2583 /* Output to FILE (an assembly file) a reference to NAME. If NAME 2584 starts with a *, the rest of NAME is output verbatim. Otherwise 2585 NAME is transformed in a target-specific way (usually by the 2586 addition of an underscore). */ 2587 2588 void 2589 assemble_name_raw (FILE *file, const char *name) 2590 { 2591 if (name[0] == '*') 2592 fputs (&name[1], file); 2593 else 2594 ASM_OUTPUT_LABELREF (file, name); 2595 } 2596 2597 /* Return NAME that should actually be emitted, looking through 2598 transparent aliases. If NAME refers to an entity that is also 2599 represented as a tree (like a function or variable), mark the entity 2600 as referenced. */ 2601 const char * 2602 assemble_name_resolve (const char *name) 2603 { 2604 const char *real_name = targetm.strip_name_encoding (name); 2605 tree id = maybe_get_identifier (real_name); 2606 2607 if (id) 2608 { 2609 tree id_orig = id; 2610 2611 mark_referenced (id); 2612 ultimate_transparent_alias_target (&id); 2613 if (id != id_orig) 2614 name = IDENTIFIER_POINTER (id); 2615 gcc_assert (! TREE_CHAIN (id)); 2616 } 2617 2618 return name; 2619 } 2620 2621 /* Like assemble_name_raw, but should be used when NAME might refer to 2622 an entity that is also represented as a tree (like a function or 2623 variable). If NAME does refer to such an entity, that entity will 2624 be marked as referenced. */ 2625 2626 void 2627 assemble_name (FILE *file, const char *name) 2628 { 2629 assemble_name_raw (file, assemble_name_resolve (name)); 2630 } 2631 2632 /* Allocate SIZE bytes of writable static space with a gensym name 2633 and return an RTX to refer to its address. */ 2634 2635 rtx 2636 assemble_static_space (unsigned HOST_WIDE_INT size) 2637 { 2638 char name[17]; 2639 const char *namestring; 2640 rtx x; 2641 2642 ASM_GENERATE_INTERNAL_LABEL (name, "LF", const_labelno); 2643 ++const_labelno; 2644 namestring = ggc_strdup (name); 2645 2646 x = gen_rtx_SYMBOL_REF (Pmode, namestring); 2647 SYMBOL_REF_FLAGS (x) = SYMBOL_FLAG_LOCAL; 2648 2649 #ifdef ASM_OUTPUT_ALIGNED_DECL_LOCAL 2650 ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, NULL_TREE, name, size, 2651 BIGGEST_ALIGNMENT); 2652 #else 2653 #ifdef ASM_OUTPUT_ALIGNED_LOCAL 2654 ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT); 2655 #else 2656 { 2657 /* Round size up to multiple of BIGGEST_ALIGNMENT bits 2658 so that each uninitialized object starts on such a boundary. */ 2659 /* Variable `rounded' might or might not be used in ASM_OUTPUT_LOCAL.
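   As a worked example of the rounding below: with a BIGGEST_ALIGNMENT of
   128 bits (16 bytes), a request for 10 bytes becomes
   ((10 + 15) / 16) * 16 == 16 bytes.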
*/ 2660 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED 2661 = ((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1) 2662 / (BIGGEST_ALIGNMENT / BITS_PER_UNIT) 2663 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT)); 2664 ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded); 2665 } 2666 #endif 2667 #endif 2668 return x; 2669 } 2670 2671 /* Assemble the static constant template for function entry trampolines. 2672 This is done at most once per compilation. 2673 Returns an RTX for the address of the template. */ 2674 2675 static GTY(()) rtx initial_trampoline; 2676 2677 rtx 2678 assemble_trampoline_template (void) 2679 { 2680 char label[256]; 2681 const char *name; 2682 int align; 2683 rtx symbol; 2684 2685 gcc_assert (targetm.asm_out.trampoline_template != NULL); 2686 2687 if (initial_trampoline) 2688 return initial_trampoline; 2689 2690 /* By default, put trampoline templates in read-only data section. */ 2691 2692 #ifdef TRAMPOLINE_SECTION 2693 switch_to_section (TRAMPOLINE_SECTION); 2694 #else 2695 switch_to_section (readonly_data_section); 2696 #endif 2697 2698 /* Write the assembler code to define one. */ 2699 align = floor_log2 (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT); 2700 if (align > 0) 2701 ASM_OUTPUT_ALIGN (asm_out_file, align); 2702 2703 targetm.asm_out.internal_label (asm_out_file, "LTRAMP", 0); 2704 targetm.asm_out.trampoline_template (asm_out_file); 2705 2706 /* Record the rtl to refer to it. */ 2707 ASM_GENERATE_INTERNAL_LABEL (label, "LTRAMP", 0); 2708 name = ggc_strdup (label); 2709 symbol = gen_rtx_SYMBOL_REF (Pmode, name); 2710 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL; 2711 2712 initial_trampoline = gen_const_mem (BLKmode, symbol); 2713 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT); 2714 set_mem_size (initial_trampoline, TRAMPOLINE_SIZE); 2715 2716 return initial_trampoline; 2717 } 2718 2719 /* A and B are either alignments or offsets. Return the minimum alignment 2720 that may be assumed after adding the two together. */ 2721 2722 static inline unsigned 2723 min_align (unsigned int a, unsigned int b) 2724 { 2725 return least_bit_hwi (a | b); 2726 } 2727 2728 /* Return the assembler directive for creating a given kind of integer 2729 object. SIZE is the number of bytes in the object and ALIGNED_P 2730 indicates whether it is known to be aligned. Return NULL if the 2731 assembly dialect has no such directive. 2732 2733 The returned string should be printed at the start of a new line and 2734 be followed immediately by the object's initial value. */ 2735 2736 const char * 2737 integer_asm_op (int size, int aligned_p) 2738 { 2739 struct asm_int_op *ops; 2740 2741 if (aligned_p) 2742 ops = &targetm.asm_out.aligned_op; 2743 else 2744 ops = &targetm.asm_out.unaligned_op; 2745 2746 switch (size) 2747 { 2748 case 1: 2749 return targetm.asm_out.byte_op; 2750 case 2: 2751 return ops->hi; 2752 case 3: 2753 return ops->psi; 2754 case 4: 2755 return ops->si; 2756 case 5: 2757 case 6: 2758 case 7: 2759 return ops->pdi; 2760 case 8: 2761 return ops->di; 2762 case 9: 2763 case 10: 2764 case 11: 2765 case 12: 2766 case 13: 2767 case 14: 2768 case 15: 2769 return ops->pti; 2770 case 16: 2771 return ops->ti; 2772 default: 2773 return NULL; 2774 } 2775 } 2776 2777 /* Use directive OP to assemble an integer object X. Print OP at the 2778 start of the line, followed immediately by the value of X. 
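   For example, integer_asm_op (4, 1) is "\t.long\t" on many ELF targets,
   so assembling a reference to symbol "foo" plus 4 prints a line like
   "\t.long\tfoo+4"; output_addr_const takes care of the symbol-plus-offset
   formatting.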
*/ 2779 2780 void 2781 assemble_integer_with_op (const char *op, rtx x) 2782 { 2783 fputs (op, asm_out_file); 2784 output_addr_const (asm_out_file, x); 2785 fputc ('\n', asm_out_file); 2786 } 2787 2788 /* The default implementation of the asm_out.integer target hook. */ 2789 2790 bool 2791 default_assemble_integer (rtx x ATTRIBUTE_UNUSED, 2792 unsigned int size ATTRIBUTE_UNUSED, 2793 int aligned_p ATTRIBUTE_UNUSED) 2794 { 2795 const char *op = integer_asm_op (size, aligned_p); 2796 /* Avoid GAS bugs for large values. Specifically negative values whose 2797 absolute value fits in a bfd_vma, but not in a bfd_signed_vma. */ 2798 if (size > UNITS_PER_WORD && size > POINTER_SIZE_UNITS) 2799 return false; 2800 return op && (assemble_integer_with_op (op, x), true); 2801 } 2802 2803 /* Assemble the integer constant X into an object of SIZE bytes. ALIGN is 2804 the alignment of the integer in bits. Return 1 if we were able to output 2805 the constant, otherwise 0. We must be able to output the constant, 2806 if FORCE is nonzero. */ 2807 2808 bool 2809 assemble_integer (rtx x, unsigned int size, unsigned int align, int force) 2810 { 2811 int aligned_p; 2812 2813 aligned_p = (align >= MIN (size * BITS_PER_UNIT, BIGGEST_ALIGNMENT)); 2814 2815 /* See if the target hook can handle this kind of object. */ 2816 if (targetm.asm_out.integer (x, size, aligned_p)) 2817 return true; 2818 2819 /* If the object is a multi-byte one, try splitting it up. Split 2820 it into words it if is multi-word, otherwise split it into bytes. */ 2821 if (size > 1) 2822 { 2823 machine_mode omode, imode; 2824 unsigned int subalign; 2825 unsigned int subsize, i; 2826 enum mode_class mclass; 2827 2828 subsize = size > UNITS_PER_WORD? UNITS_PER_WORD : 1; 2829 subalign = MIN (align, subsize * BITS_PER_UNIT); 2830 if (GET_CODE (x) == CONST_FIXED) 2831 mclass = GET_MODE_CLASS (GET_MODE (x)); 2832 else 2833 mclass = MODE_INT; 2834 2835 omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0).require (); 2836 imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0).require (); 2837 2838 for (i = 0; i < size; i += subsize) 2839 { 2840 rtx partial = simplify_subreg (omode, x, imode, i); 2841 if (!partial || !assemble_integer (partial, subsize, subalign, 0)) 2842 break; 2843 } 2844 if (i == size) 2845 return true; 2846 2847 /* If we've printed some of it, but not all of it, there's no going 2848 back now. */ 2849 gcc_assert (!i); 2850 } 2851 2852 gcc_assert (!force); 2853 2854 return false; 2855 } 2856 2857 /* Assemble the floating-point constant D into an object of size MODE. ALIGN 2858 is the alignment of the constant in bits. If REVERSE is true, D is output 2859 in reverse storage order. */ 2860 2861 void 2862 assemble_real (REAL_VALUE_TYPE d, scalar_float_mode mode, unsigned int align, 2863 bool reverse) 2864 { 2865 long data[4] = {0, 0, 0, 0}; 2866 int bitsize, nelts, nunits, units_per; 2867 rtx elt; 2868 2869 /* This is hairy. We have a quantity of known size. real_to_target 2870 will put it into an array of *host* longs, 32 bits per element 2871 (even if long is more than 32 bits). We need to determine the 2872 number of array elements that are occupied (nelts) and the number 2873 of *target* min-addressable units that will be occupied in the 2874 object file (nunits). We cannot assume that 32 divides the 2875 mode's bitsize (size * BITS_PER_UNIT) evenly. 
2876 2877 size * BITS_PER_UNIT is used here to make sure that padding bits 2878 (which might appear at either end of the value; real_to_target 2879 will include the padding bits in its output array) are included. */ 2880 2881 nunits = GET_MODE_SIZE (mode); 2882 bitsize = nunits * BITS_PER_UNIT; 2883 nelts = CEIL (bitsize, 32); 2884 units_per = 32 / BITS_PER_UNIT; 2885 2886 real_to_target (data, &d, mode); 2887 2888 /* Put out the first word with the specified alignment. */ 2889 unsigned int chunk_nunits = MIN (nunits, units_per); 2890 if (reverse) 2891 elt = flip_storage_order (SImode, gen_int_mode (data[nelts - 1], SImode)); 2892 else 2893 elt = GEN_INT (sext_hwi (data[0], chunk_nunits * BITS_PER_UNIT)); 2894 assemble_integer (elt, chunk_nunits, align, 1); 2895 nunits -= chunk_nunits; 2896 2897 /* Subsequent words need only 32-bit alignment. */ 2898 align = min_align (align, 32); 2899 2900 for (int i = 1; i < nelts; i++) 2901 { 2902 chunk_nunits = MIN (nunits, units_per); 2903 if (reverse) 2904 elt = flip_storage_order (SImode, 2905 gen_int_mode (data[nelts - 1 - i], SImode)); 2906 else 2907 elt = GEN_INT (sext_hwi (data[i], chunk_nunits * BITS_PER_UNIT)); 2908 assemble_integer (elt, chunk_nunits, align, 1); 2909 nunits -= chunk_nunits; 2910 } 2911 } 2912 2913 /* Given an expression EXP with a constant value, 2914 reduce it to the sum of an assembler symbol and an integer. 2915 Store them both in the structure *VALUE. 2916 EXP must be reducible. */ 2917 2918 class addr_const { 2919 public: 2920 rtx base; 2921 poly_int64 offset; 2922 }; 2923 2924 static void 2925 decode_addr_const (tree exp, class addr_const *value) 2926 { 2927 tree target = TREE_OPERAND (exp, 0); 2928 poly_int64 offset = 0; 2929 rtx x; 2930 2931 while (1) 2932 { 2933 poly_int64 bytepos; 2934 if (TREE_CODE (target) == COMPONENT_REF 2935 && poly_int_tree_p (byte_position (TREE_OPERAND (target, 1)), 2936 &bytepos)) 2937 { 2938 offset += bytepos; 2939 target = TREE_OPERAND (target, 0); 2940 } 2941 else if (TREE_CODE (target) == ARRAY_REF 2942 || TREE_CODE (target) == ARRAY_RANGE_REF) 2943 { 2944 /* Truncate big offset. */ 2945 offset 2946 += (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (target))) 2947 * wi::to_poly_widest (TREE_OPERAND (target, 1)).force_shwi ()); 2948 target = TREE_OPERAND (target, 0); 2949 } 2950 else if (TREE_CODE (target) == MEM_REF 2951 && TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR) 2952 { 2953 offset += mem_ref_offset (target).force_shwi (); 2954 target = TREE_OPERAND (TREE_OPERAND (target, 0), 0); 2955 } 2956 else if (TREE_CODE (target) == INDIRECT_REF 2957 && TREE_CODE (TREE_OPERAND (target, 0)) == NOP_EXPR 2958 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (target, 0), 0)) 2959 == ADDR_EXPR) 2960 target = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (target, 0), 0), 0); 2961 else 2962 break; 2963 } 2964 2965 switch (TREE_CODE (target)) 2966 { 2967 case VAR_DECL: 2968 case FUNCTION_DECL: 2969 x = DECL_RTL (target); 2970 break; 2971 2972 case LABEL_DECL: 2973 x = gen_rtx_MEM (FUNCTION_MODE, 2974 gen_rtx_LABEL_REF (Pmode, force_label_rtx (target))); 2975 break; 2976 2977 case REAL_CST: 2978 case FIXED_CST: 2979 case STRING_CST: 2980 case COMPLEX_CST: 2981 case CONSTRUCTOR: 2982 case INTEGER_CST: 2983 x = lookup_constant_def (target); 2984 /* Should have been added by output_addressed_constants. */ 2985 gcc_assert (x); 2986 break; 2987 2988 case INDIRECT_REF: 2989 /* This deals with absolute addresses. 
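   That is, a constant address such as *(int *) 0x400 decomposes into the
   dummy base symbol "origin of addresses" plus the literal offset 0x400,
   so two absolute addresses still hash and compare consistently.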
*/ 2990 offset += tree_to_shwi (TREE_OPERAND (target, 0)); 2991 x = gen_rtx_MEM (QImode, 2992 gen_rtx_SYMBOL_REF (Pmode, "origin of addresses")); 2993 break; 2994 2995 case COMPOUND_LITERAL_EXPR: 2996 gcc_assert (COMPOUND_LITERAL_EXPR_DECL (target)); 2997 x = DECL_RTL (COMPOUND_LITERAL_EXPR_DECL (target)); 2998 break; 2999 3000 default: 3001 gcc_unreachable (); 3002 } 3003 3004 gcc_assert (MEM_P (x)); 3005 x = XEXP (x, 0); 3006 3007 value->base = x; 3008 value->offset = offset; 3009 } 3010 3011 static GTY(()) hash_table<tree_descriptor_hasher> *const_desc_htab; 3012 3013 static void maybe_output_constant_def_contents (struct constant_descriptor_tree *, int); 3014 3015 /* Constant pool accessor function. */ 3016 3017 hash_table<tree_descriptor_hasher> * 3018 constant_pool_htab (void) 3019 { 3020 return const_desc_htab; 3021 } 3022 3023 /* Compute a hash code for a constant expression. */ 3024 3025 hashval_t 3026 tree_descriptor_hasher::hash (constant_descriptor_tree *ptr) 3027 { 3028 return ptr->hash; 3029 } 3030 3031 static hashval_t 3032 const_hash_1 (const tree exp) 3033 { 3034 const char *p; 3035 hashval_t hi; 3036 int len, i; 3037 enum tree_code code = TREE_CODE (exp); 3038 3039 /* Either set P and LEN to the address and len of something to hash and 3040 exit the switch or return a value. */ 3041 3042 switch (code) 3043 { 3044 case INTEGER_CST: 3045 p = (char *) &TREE_INT_CST_ELT (exp, 0); 3046 len = TREE_INT_CST_NUNITS (exp) * sizeof (HOST_WIDE_INT); 3047 break; 3048 3049 case REAL_CST: 3050 return real_hash (TREE_REAL_CST_PTR (exp)); 3051 3052 case FIXED_CST: 3053 return fixed_hash (TREE_FIXED_CST_PTR (exp)); 3054 3055 case STRING_CST: 3056 p = TREE_STRING_POINTER (exp); 3057 len = TREE_STRING_LENGTH (exp); 3058 break; 3059 3060 case COMPLEX_CST: 3061 return (const_hash_1 (TREE_REALPART (exp)) * 5 3062 + const_hash_1 (TREE_IMAGPART (exp))); 3063 3064 case VECTOR_CST: 3065 { 3066 hi = 7 + VECTOR_CST_NPATTERNS (exp); 3067 hi = hi * 563 + VECTOR_CST_NELTS_PER_PATTERN (exp); 3068 unsigned int count = vector_cst_encoded_nelts (exp); 3069 for (unsigned int i = 0; i < count; ++i) 3070 hi = hi * 563 + const_hash_1 (VECTOR_CST_ENCODED_ELT (exp, i)); 3071 return hi; 3072 } 3073 3074 case CONSTRUCTOR: 3075 { 3076 unsigned HOST_WIDE_INT idx; 3077 tree value; 3078 3079 hi = 5 + int_size_in_bytes (TREE_TYPE (exp)); 3080 3081 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value) 3082 if (value) 3083 hi = hi * 603 + const_hash_1 (value); 3084 3085 return hi; 3086 } 3087 3088 case ADDR_EXPR: 3089 if (CONSTANT_CLASS_P (TREE_OPERAND (exp, 0))) 3090 return const_hash_1 (TREE_OPERAND (exp, 0)); 3091 3092 /* Fallthru. */ 3093 case FDESC_EXPR: 3094 { 3095 class addr_const value; 3096 3097 decode_addr_const (exp, &value); 3098 switch (GET_CODE (value.base)) 3099 { 3100 case SYMBOL_REF: 3101 /* Don't hash the address of the SYMBOL_REF; 3102 only use the offset and the symbol name. 
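   (Two distinct SYMBOL_REF rtxes can name the same symbol, and
   compare_constant below compares bases with strcmp on the names, so
   hashing by name keeps hashing and equality consistent.)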
*/ 3103 hi = value.offset.coeffs[0]; 3104 p = XSTR (value.base, 0); 3105 for (i = 0; p[i] != 0; i++) 3106 hi = ((hi * 613) + (unsigned) (p[i])); 3107 break; 3108 3109 case LABEL_REF: 3110 hi = (value.offset.coeffs[0] 3111 + CODE_LABEL_NUMBER (label_ref_label (value.base)) * 13); 3112 break; 3113 3114 default: 3115 gcc_unreachable (); 3116 } 3117 } 3118 return hi; 3119 3120 case PLUS_EXPR: 3121 case POINTER_PLUS_EXPR: 3122 case MINUS_EXPR: 3123 return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9 3124 + const_hash_1 (TREE_OPERAND (exp, 1))); 3125 3126 CASE_CONVERT: 3127 return const_hash_1 (TREE_OPERAND (exp, 0)) * 7 + 2; 3128 3129 default: 3130 /* A language specific constant. Just hash the code. */ 3131 return code; 3132 } 3133 3134 /* Compute hashing function. */ 3135 hi = len; 3136 for (i = 0; i < len; i++) 3137 hi = ((hi * 613) + (unsigned) (p[i])); 3138 3139 return hi; 3140 } 3141 3142 /* Wrapper of compare_constant, for the htab interface. */ 3143 bool 3144 tree_descriptor_hasher::equal (constant_descriptor_tree *c1, 3145 constant_descriptor_tree *c2) 3146 { 3147 if (c1->hash != c2->hash) 3148 return 0; 3149 return compare_constant (c1->value, c2->value); 3150 } 3151 3152 /* Compare t1 and t2, and return 1 only if they are known to result in 3153 the same bit pattern on output. */ 3154 3155 static int 3156 compare_constant (const tree t1, const tree t2) 3157 { 3158 enum tree_code typecode; 3159 3160 if (t1 == NULL_TREE) 3161 return t2 == NULL_TREE; 3162 if (t2 == NULL_TREE) 3163 return 0; 3164 3165 if (TREE_CODE (t1) != TREE_CODE (t2)) 3166 return 0; 3167 3168 switch (TREE_CODE (t1)) 3169 { 3170 case INTEGER_CST: 3171 /* Integer constants are the same only if the same width of type. */ 3172 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2))) 3173 return 0; 3174 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))) 3175 return 0; 3176 return tree_int_cst_equal (t1, t2); 3177 3178 case REAL_CST: 3179 /* Real constants are the same only if the same width of type. In 3180 addition to the same width, we need to check whether the modes are the 3181 same. There might be two floating point modes that are the same size 3182 but have different representations, such as the PowerPC that has 2 3183 different 128-bit floating point types (IBM extended double and IEEE 3184 128-bit floating point). */ 3185 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2))) 3186 return 0; 3187 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))) 3188 return 0; 3189 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2)); 3190 3191 case FIXED_CST: 3192 /* Fixed constants are the same only if the same width of type. */ 3193 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2))) 3194 return 0; 3195 3196 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2)); 3197 3198 case STRING_CST: 3199 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)) 3200 || int_size_in_bytes (TREE_TYPE (t1)) 3201 != int_size_in_bytes (TREE_TYPE (t2))) 3202 return 0; 3203 3204 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2) 3205 && ! 
memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2), 3206 TREE_STRING_LENGTH (t1))); 3207 3208 case COMPLEX_CST: 3209 return (compare_constant (TREE_REALPART (t1), TREE_REALPART (t2)) 3210 && compare_constant (TREE_IMAGPART (t1), TREE_IMAGPART (t2))); 3211 3212 case VECTOR_CST: 3213 { 3214 if (VECTOR_CST_NPATTERNS (t1) 3215 != VECTOR_CST_NPATTERNS (t2)) 3216 return 0; 3217 3218 if (VECTOR_CST_NELTS_PER_PATTERN (t1) 3219 != VECTOR_CST_NELTS_PER_PATTERN (t2)) 3220 return 0; 3221 3222 unsigned int count = vector_cst_encoded_nelts (t1); 3223 for (unsigned int i = 0; i < count; ++i) 3224 if (!compare_constant (VECTOR_CST_ENCODED_ELT (t1, i), 3225 VECTOR_CST_ENCODED_ELT (t2, i))) 3226 return 0; 3227 3228 return 1; 3229 } 3230 3231 case CONSTRUCTOR: 3232 { 3233 vec<constructor_elt, va_gc> *v1, *v2; 3234 unsigned HOST_WIDE_INT idx; 3235 3236 typecode = TREE_CODE (TREE_TYPE (t1)); 3237 if (typecode != TREE_CODE (TREE_TYPE (t2))) 3238 return 0; 3239 3240 if (typecode == ARRAY_TYPE) 3241 { 3242 HOST_WIDE_INT size_1 = int_size_in_bytes (TREE_TYPE (t1)); 3243 /* For arrays, check that mode, size and storage order match. */ 3244 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)) 3245 || size_1 == -1 3246 || size_1 != int_size_in_bytes (TREE_TYPE (t2)) 3247 || TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t1)) 3248 != TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t2))) 3249 return 0; 3250 } 3251 else 3252 { 3253 /* For record and union constructors, require exact type 3254 equality. */ 3255 if (TREE_TYPE (t1) != TREE_TYPE (t2)) 3256 return 0; 3257 } 3258 3259 v1 = CONSTRUCTOR_ELTS (t1); 3260 v2 = CONSTRUCTOR_ELTS (t2); 3261 if (vec_safe_length (v1) != vec_safe_length (v2)) 3262 return 0; 3263 3264 for (idx = 0; idx < vec_safe_length (v1); ++idx) 3265 { 3266 constructor_elt *c1 = &(*v1)[idx]; 3267 constructor_elt *c2 = &(*v2)[idx]; 3268 3269 /* Check that each value is the same... */ 3270 if (!compare_constant (c1->value, c2->value)) 3271 return 0; 3272 /* ... and that they apply to the same fields! */ 3273 if (typecode == ARRAY_TYPE) 3274 { 3275 if (!compare_constant (c1->index, c2->index)) 3276 return 0; 3277 } 3278 else 3279 { 3280 if (c1->index != c2->index) 3281 return 0; 3282 } 3283 } 3284 3285 return 1; 3286 } 3287 3288 case ADDR_EXPR: 3289 case FDESC_EXPR: 3290 { 3291 class addr_const value1, value2; 3292 enum rtx_code code; 3293 int ret; 3294 3295 decode_addr_const (t1, &value1); 3296 decode_addr_const (t2, &value2); 3297 3298 if (maybe_ne (value1.offset, value2.offset)) 3299 return 0; 3300 3301 code = GET_CODE (value1.base); 3302 if (code != GET_CODE (value2.base)) 3303 return 0; 3304 3305 switch (code) 3306 { 3307 case SYMBOL_REF: 3308 ret = (strcmp (XSTR (value1.base, 0), XSTR (value2.base, 0)) == 0); 3309 break; 3310 3311 case LABEL_REF: 3312 ret = (CODE_LABEL_NUMBER (label_ref_label (value1.base)) 3313 == CODE_LABEL_NUMBER (label_ref_label (value2.base))); 3314 break; 3315 3316 default: 3317 gcc_unreachable (); 3318 } 3319 return ret; 3320 } 3321 3322 case PLUS_EXPR: 3323 case POINTER_PLUS_EXPR: 3324 case MINUS_EXPR: 3325 case RANGE_EXPR: 3326 return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)) 3327 && compare_constant (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1))); 3328 3329 CASE_CONVERT: 3330 case VIEW_CONVERT_EXPR: 3331 return compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 3332 3333 default: 3334 return 0; 3335 } 3336 3337 gcc_unreachable (); 3338 } 3339 3340 /* Return the section into which constant EXP should be placed. 
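   On a typical ELF target, for instance, a mergeable string literal is
   directed to a section like .rodata.str1.1 while other read-only
   constants land in .rodata; the choice is entirely up to the target hook.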
*/ 3341 3342 static section * 3343 get_constant_section (tree exp, unsigned int align) 3344 { 3345 return targetm.asm_out.select_section (exp, 3346 compute_reloc_for_constant (exp), 3347 align); 3348 } 3349 3350 /* Return the size of constant EXP in bytes. */ 3351 3352 static HOST_WIDE_INT 3353 get_constant_size (tree exp) 3354 { 3355 HOST_WIDE_INT size; 3356 3357 size = int_size_in_bytes (TREE_TYPE (exp)); 3358 gcc_checking_assert (size >= 0); 3359 gcc_checking_assert (TREE_CODE (exp) != STRING_CST 3360 || size >= TREE_STRING_LENGTH (exp)); 3361 return size; 3362 } 3363 3364 /* Subroutine of output_constant_def: 3365 No constant equal to EXP is known to have been output. 3366 Make a constant descriptor to enter EXP in the hash table. 3367 Assign the label number and construct RTL to refer to the 3368 constant's location in memory. 3369 Caller is responsible for updating the hash table. */ 3370 3371 static struct constant_descriptor_tree * 3372 build_constant_desc (tree exp) 3373 { 3374 struct constant_descriptor_tree *desc; 3375 rtx symbol, rtl; 3376 char label[256]; 3377 int labelno; 3378 tree decl; 3379 3380 desc = ggc_alloc<constant_descriptor_tree> (); 3381 desc->value = exp; 3382 3383 /* Create a string containing the label name, in LABEL. */ 3384 labelno = const_labelno++; 3385 ASM_GENERATE_INTERNAL_LABEL (label, "LC", labelno); 3386 3387 /* Construct the VAR_DECL associated with the constant. */ 3388 decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (label), 3389 TREE_TYPE (exp)); 3390 DECL_ARTIFICIAL (decl) = 1; 3391 DECL_IGNORED_P (decl) = 1; 3392 TREE_READONLY (decl) = 1; 3393 TREE_STATIC (decl) = 1; 3394 TREE_ADDRESSABLE (decl) = 1; 3395 /* We don't set the RTL yet as this would cause varpool to assume that the 3396 variable is referenced. Moreover, it would just be dropped in LTO mode. 3397 Instead we set the flag that will be recognized in make_decl_rtl. */ 3398 DECL_IN_CONSTANT_POOL (decl) = 1; 3399 DECL_INITIAL (decl) = desc->value; 3400 /* ??? targetm.constant_alignment hasn't been updated for vector types on 3401 most architectures so use DATA_ALIGNMENT as well, except for strings. */ 3402 if (TREE_CODE (exp) == STRING_CST) 3403 SET_DECL_ALIGN (decl, targetm.constant_alignment (exp, DECL_ALIGN (decl))); 3404 else 3405 { 3406 align_variable (decl, 0); 3407 if (DECL_ALIGN (decl) < GET_MODE_ALIGNMENT (DECL_MODE (decl)) 3408 && ((optab_handler (movmisalign_optab, DECL_MODE (decl)) 3409 != CODE_FOR_nothing) 3410 || targetm.slow_unaligned_access (DECL_MODE (decl), 3411 DECL_ALIGN (decl)))) 3412 SET_DECL_ALIGN (decl, GET_MODE_ALIGNMENT (DECL_MODE (decl))); 3413 } 3414 3415 /* Now construct the SYMBOL_REF and the MEM. */ 3416 if (use_object_blocks_p ()) 3417 { 3418 int align = (TREE_CODE (decl) == CONST_DECL 3419 || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl)) 3420 ? DECL_ALIGN (decl) 3421 : symtab_node::get (decl)->definition_alignment ()); 3422 section *sect = get_constant_section (exp, align); 3423 symbol = create_block_symbol (ggc_strdup (label), 3424 get_block_for_section (sect), -1); 3425 } 3426 else 3427 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label)); 3428 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL; 3429 SET_SYMBOL_REF_DECL (symbol, decl); 3430 TREE_CONSTANT_POOL_ADDRESS_P (symbol) = 1; 3431 3432 rtl = gen_const_mem (TYPE_MODE (TREE_TYPE (exp)), symbol); 3433 set_mem_alias_set (rtl, 0); 3434 3435 /* Putting EXP into the literal pool might have imposed a different 3436 alignment which should be visible in the RTX as well. 
*/ 3437 set_mem_align (rtl, DECL_ALIGN (decl)); 3438 3439 /* We cannot share RTX'es in pool entries. 3440 Mark this piece of RTL as required for unsharing. */ 3441 RTX_FLAG (rtl, used) = 1; 3442 3443 /* Set flags or add text to the name to record information, such as 3444 that it is a local symbol. If the name is changed, the macro 3445 ASM_OUTPUT_LABELREF will have to know how to strip this 3446 information. This call might invalidate our local variable 3447 SYMBOL; we can't use it afterward. */ 3448 targetm.encode_section_info (exp, rtl, true); 3449 3450 desc->rtl = rtl; 3451 3452 return desc; 3453 } 3454 3455 /* Subroutine of output_constant_def and tree_output_constant_def: 3456 Add a constant to the hash table that tracks which constants 3457 already have labels. */ 3458 3459 static constant_descriptor_tree * 3460 add_constant_to_table (tree exp) 3461 { 3462 /* The hash table methods may call output_constant_def for addressed 3463 constants, so handle them first. */ 3464 output_addressed_constants (exp); 3465 3466 /* Sanity check to catch recursive insertion. */ 3467 static bool inserting; 3468 gcc_assert (!inserting); 3469 inserting = true; 3470 3471 /* Look up EXP in the table of constant descriptors. If we didn't 3472 find it, create a new one. */ 3473 struct constant_descriptor_tree key; 3474 key.value = exp; 3475 key.hash = const_hash_1 (exp); 3476 constant_descriptor_tree **loc 3477 = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT); 3478 3479 inserting = false; 3480 3481 struct constant_descriptor_tree *desc = *loc; 3482 if (!desc) 3483 { 3484 desc = build_constant_desc (exp); 3485 desc->hash = key.hash; 3486 *loc = desc; 3487 } 3488 3489 return desc; 3490 } 3491 3492 /* Return an rtx representing a reference to constant data in memory 3493 for the constant expression EXP. 3494 3495 If assembler code for such a constant has already been output, 3496 return an rtx to refer to it. 3497 Otherwise, output such a constant in memory 3498 and generate an rtx for it. 3499 3500 If DEFER is nonzero, this constant can be deferred and output only 3501 if referenced in the function after all optimizations. 3502 3503 `const_desc_table' records which constants already have label strings. */ 3504 3505 rtx 3506 output_constant_def (tree exp, int defer) 3507 { 3508 struct constant_descriptor_tree *desc = add_constant_to_table (exp); 3509 maybe_output_constant_def_contents (desc, defer); 3510 return desc->rtl; 3511 } 3512 3513 /* Subroutine of output_constant_def: Decide whether or not we need to 3514 output the constant DESC now, and if so, do it. */ 3515 static void 3516 maybe_output_constant_def_contents (struct constant_descriptor_tree *desc, 3517 int defer) 3518 { 3519 rtx symbol = XEXP (desc->rtl, 0); 3520 tree exp = desc->value; 3521 3522 if (flag_syntax_only) 3523 return; 3524 3525 if (TREE_ASM_WRITTEN (exp)) 3526 /* Already output; don't do it again. */ 3527 return; 3528 3529 /* We can always defer constants as long as the context allows 3530 doing so. */ 3531 if (defer) 3532 { 3533 /* Increment n_deferred_constants if it exists. It needs to be at 3534 least as large as the number of constants actually referred to 3535 by the function. If it's too small we'll stop looking too early 3536 and fail to emit constants; if it's too large we'll only look 3537 through the entire function when we could have stopped earlier. 
*/ 3538 if (cfun) 3539 n_deferred_constants++; 3540 return; 3541 } 3542 3543 output_constant_def_contents (symbol); 3544 } 3545 3546 /* Subroutine of output_constant_def_contents. Output the definition 3547 of constant EXP, which is pointed to by label LABEL. ALIGN is the 3548 constant's alignment in bits. */ 3549 3550 static void 3551 assemble_constant_contents (tree exp, const char *label, unsigned int align, 3552 bool merge_strings) 3553 { 3554 HOST_WIDE_INT size; 3555 3556 size = get_constant_size (exp); 3557 3558 /* Do any machine/system dependent processing of the constant. */ 3559 targetm.asm_out.declare_constant_name (asm_out_file, label, exp, size); 3560 3561 /* Output the value of EXP. */ 3562 output_constant (exp, size, align, false, merge_strings); 3563 3564 targetm.asm_out.decl_end (); 3565 } 3566 3567 /* We must output the constant data referred to by SYMBOL; do so. */ 3568 3569 static void 3570 output_constant_def_contents (rtx symbol) 3571 { 3572 tree decl = SYMBOL_REF_DECL (symbol); 3573 tree exp = DECL_INITIAL (decl); 3574 bool asan_protected = false; 3575 3576 /* Make sure any other constants whose addresses appear in EXP 3577 are assigned label numbers. */ 3578 output_addressed_constants (exp); 3579 3580 /* We are no longer deferring this constant. */ 3581 TREE_ASM_WRITTEN (decl) = TREE_ASM_WRITTEN (exp) = 1; 3582 3583 if ((flag_sanitize & SANITIZE_ADDRESS) 3584 && TREE_CODE (exp) == STRING_CST 3585 && asan_protect_global (exp)) 3586 { 3587 asan_protected = true; 3588 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl), 3589 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT)); 3590 } 3591 3592 /* If the constant is part of an object block, make sure that the 3593 decl has been positioned within its block, but do not write out 3594 its definition yet. output_object_blocks will do that later. */ 3595 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol)) 3596 place_block_symbol (symbol); 3597 else 3598 { 3599 int align = (TREE_CODE (decl) == CONST_DECL 3600 || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl)) 3601 ? DECL_ALIGN (decl) 3602 : symtab_node::get (decl)->definition_alignment ()); 3603 section *sect = get_constant_section (exp, align); 3604 switch_to_section (sect); 3605 if (align > BITS_PER_UNIT) 3606 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT)); 3607 assemble_constant_contents (exp, XSTR (symbol, 0), align, 3608 (sect->common.flags & SECTION_MERGE) 3609 && (sect->common.flags & SECTION_STRINGS)); 3610 if (asan_protected) 3611 { 3612 HOST_WIDE_INT size = get_constant_size (exp); 3613 assemble_zeros (asan_red_zone_size (size)); 3614 } 3615 } 3616 } 3617 3618 /* Look up EXP in the table of constant descriptors. Return the rtl 3619 if it has been emitted, else null. */ 3620 3621 rtx 3622 lookup_constant_def (tree exp) 3623 { 3624 struct constant_descriptor_tree key; 3625 3626 key.value = exp; 3627 key.hash = const_hash_1 (exp); 3628 constant_descriptor_tree *desc 3629 = const_desc_htab->find_with_hash (&key, key.hash); 3630 3631 return (desc ? desc->rtl : NULL_RTX); 3632 } 3633 3634 /* Return a tree representing a reference to constant data in memory 3635 for the constant expression EXP. 3636 3637 This is the counterpart of output_constant_def at the Tree level. 
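   For example, a front end can hand a large CONSTRUCTOR to this function
   and get back an artificial read-only VAR_DECL (named after an .LC<N>
   internal label) whose address it can take, with varpool seeing to it
   that the data is emitted once.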
*/ 3638 3639 tree 3640 tree_output_constant_def (tree exp) 3641 { 3642 struct constant_descriptor_tree *desc = add_constant_to_table (exp); 3643 tree decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0)); 3644 varpool_node::finalize_decl (decl); 3645 return decl; 3646 } 3647 3648 class GTY((chain_next ("%h.next"), for_user)) constant_descriptor_rtx { 3649 public: 3650 class constant_descriptor_rtx *next; 3651 rtx mem; 3652 rtx sym; 3653 rtx constant; 3654 HOST_WIDE_INT offset; 3655 hashval_t hash; 3656 fixed_size_mode mode; 3657 unsigned int align; 3658 int labelno; 3659 int mark; 3660 }; 3661 3662 struct const_rtx_desc_hasher : ggc_ptr_hash<constant_descriptor_rtx> 3663 { 3664 static hashval_t hash (constant_descriptor_rtx *); 3665 static bool equal (constant_descriptor_rtx *, constant_descriptor_rtx *); 3666 }; 3667 3668 /* Used in the hash tables to avoid outputting the same constant 3669 twice. Unlike 'struct constant_descriptor_tree', RTX constants 3670 are output once per function, not once per file. */ 3671 /* ??? Only a few targets need per-function constant pools. Most 3672 can use one per-file pool. Should add a targetm bit to tell the 3673 difference. */ 3674 3675 struct GTY(()) rtx_constant_pool { 3676 /* Pointers to first and last constant in pool, as ordered by offset. */ 3677 class constant_descriptor_rtx *first; 3678 class constant_descriptor_rtx *last; 3679 3680 /* Hash facility for making memory-constants from constant rtl-expressions. 3681 It is used on RISC machines where immediate integer arguments and 3682 constant addresses are restricted so that such constants must be stored 3683 in memory. */ 3684 hash_table<const_rtx_desc_hasher> *const_rtx_htab; 3685 3686 /* Current offset in constant pool (does not include any 3687 machine-specific header). */ 3688 HOST_WIDE_INT offset; 3689 }; 3690 3691 /* Hash and compare functions for const_rtx_htab. */ 3692 3693 hashval_t 3694 const_rtx_desc_hasher::hash (constant_descriptor_rtx *desc) 3695 { 3696 return desc->hash; 3697 } 3698 3699 bool 3700 const_rtx_desc_hasher::equal (constant_descriptor_rtx *x, 3701 constant_descriptor_rtx *y) 3702 { 3703 if (x->mode != y->mode) 3704 return 0; 3705 return rtx_equal_p (x->constant, y->constant); 3706 } 3707 3708 /* Hash one component of a constant. 
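   Each component contributes its code and mode plus value-specific bits: a CONST_INT, for example, XORs in its HOST_WIDE_INT value folded down to hashval_t width, and a SYMBOL_REF hashes its name.  const_rtx_hash below combines these contributions over all sub-rtxes of a constant.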
*/ 3709 3710 static hashval_t 3711 const_rtx_hash_1 (const_rtx x) 3712 { 3713 unsigned HOST_WIDE_INT hwi; 3714 machine_mode mode; 3715 enum rtx_code code; 3716 hashval_t h; 3717 int i; 3718 3719 code = GET_CODE (x); 3720 mode = GET_MODE (x); 3721 h = (hashval_t) code * 1048573 + mode; 3722 3723 switch (code) 3724 { 3725 case CONST_INT: 3726 hwi = INTVAL (x); 3727 3728 fold_hwi: 3729 { 3730 int shift = sizeof (hashval_t) * CHAR_BIT; 3731 const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t); 3732 3733 h ^= (hashval_t) hwi; 3734 for (i = 1; i < n; ++i) 3735 { 3736 hwi >>= shift; 3737 h ^= (hashval_t) hwi; 3738 } 3739 } 3740 break; 3741 3742 case CONST_WIDE_INT: 3743 hwi = 0; 3744 { 3745 for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++) 3746 hwi ^= CONST_WIDE_INT_ELT (x, i); 3747 goto fold_hwi; 3748 } 3749 3750 case CONST_DOUBLE: 3751 if (TARGET_SUPPORTS_WIDE_INT == 0 && mode == VOIDmode) 3752 { 3753 hwi = CONST_DOUBLE_LOW (x) ^ CONST_DOUBLE_HIGH (x); 3754 goto fold_hwi; 3755 } 3756 else 3757 h ^= real_hash (CONST_DOUBLE_REAL_VALUE (x)); 3758 break; 3759 3760 case CONST_FIXED: 3761 h ^= fixed_hash (CONST_FIXED_VALUE (x)); 3762 break; 3763 3764 case SYMBOL_REF: 3765 h ^= htab_hash_string (XSTR (x, 0)); 3766 break; 3767 3768 case LABEL_REF: 3769 h = h * 251 + CODE_LABEL_NUMBER (label_ref_label (x)); 3770 break; 3771 3772 case UNSPEC: 3773 case UNSPEC_VOLATILE: 3774 h = h * 251 + XINT (x, 1); 3775 break; 3776 3777 default: 3778 break; 3779 } 3780 3781 return h; 3782 } 3783 3784 /* Compute a hash value for X, which should be a constant. */ 3785 3786 static hashval_t 3787 const_rtx_hash (rtx x) 3788 { 3789 hashval_t h = 0; 3790 subrtx_iterator::array_type array; 3791 FOR_EACH_SUBRTX (iter, array, x, ALL) 3792 h = h * 509 + const_rtx_hash_1 (*iter); 3793 return h; 3794 } 3795 3796 3797 /* Create and return a new rtx constant pool. */ 3798 3799 static struct rtx_constant_pool * 3800 create_constant_pool (void) 3801 { 3802 struct rtx_constant_pool *pool; 3803 3804 pool = ggc_alloc<rtx_constant_pool> (); 3805 pool->const_rtx_htab = hash_table<const_rtx_desc_hasher>::create_ggc (31); 3806 pool->first = NULL; 3807 pool->last = NULL; 3808 pool->offset = 0; 3809 return pool; 3810 } 3811 3812 /* Initialize constant pool hashing for a new function. */ 3813 3814 void 3815 init_varasm_status (void) 3816 { 3817 crtl->varasm.pool = create_constant_pool (); 3818 crtl->varasm.deferred_constants = 0; 3819 } 3820 3821 /* Given a MINUS expression, simplify it if both sides 3822 include the same symbol. */ 3823 3824 rtx 3825 simplify_subtraction (rtx x) 3826 { 3827 rtx r = simplify_rtx (x); 3828 return r ? r : x; 3829 } 3830 3831 /* Given a constant rtx X, make (or find) a memory constant for its value 3832 and return a MEM rtx to refer to it in memory. IN_MODE is the mode 3833 of X. */ 3834 3835 rtx 3836 force_const_mem (machine_mode in_mode, rtx x) 3837 { 3838 class constant_descriptor_rtx *desc, tmp; 3839 struct rtx_constant_pool *pool; 3840 char label[256]; 3841 rtx def, symbol; 3842 hashval_t hash; 3843 unsigned int align; 3844 constant_descriptor_rtx **slot; 3845 fixed_size_mode mode; 3846 3847 /* We can't force variable-sized objects to memory. */ 3848 if (!is_a <fixed_size_mode> (in_mode, &mode)) 3849 return NULL_RTX; 3850 3851 /* If we're not allowed to drop X into the constant pool, don't. */ 3852 if (targetm.cannot_force_const_mem (mode, x)) 3853 return NULL_RTX; 3854 3855 /* Record that this function has used a constant pool entry. 
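   mark_constant_pool relies on this flag (together with the deferred-constant count) to decide whether the function's insns need to be scanned at all.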
*/ 3856 crtl->uses_const_pool = 1; 3857 3858 /* Decide which pool to use. */ 3859 pool = (targetm.use_blocks_for_constant_p (mode, x) 3860 ? shared_constant_pool 3861 : crtl->varasm.pool); 3862 3863 /* Lookup the value in the hashtable. */ 3864 tmp.constant = x; 3865 tmp.mode = mode; 3866 hash = const_rtx_hash (x); 3867 slot = pool->const_rtx_htab->find_slot_with_hash (&tmp, hash, INSERT); 3868 desc = *slot; 3869 3870 /* If the constant was already present, return its memory. */ 3871 if (desc) 3872 return copy_rtx (desc->mem); 3873 3874 /* Otherwise, create a new descriptor. */ 3875 desc = ggc_alloc<constant_descriptor_rtx> (); 3876 *slot = desc; 3877 3878 /* Align the location counter as required by EXP's data type. */ 3879 machine_mode align_mode = (mode == VOIDmode ? word_mode : mode); 3880 align = targetm.static_rtx_alignment (align_mode); 3881 3882 pool->offset += (align / BITS_PER_UNIT) - 1; 3883 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1); 3884 3885 desc->next = NULL; 3886 desc->constant = copy_rtx (tmp.constant); 3887 desc->offset = pool->offset; 3888 desc->hash = hash; 3889 desc->mode = mode; 3890 desc->align = align; 3891 desc->labelno = const_labelno; 3892 desc->mark = 0; 3893 3894 pool->offset += GET_MODE_SIZE (mode); 3895 if (pool->last) 3896 pool->last->next = desc; 3897 else 3898 pool->first = pool->last = desc; 3899 pool->last = desc; 3900 3901 /* Create a string containing the label name, in LABEL. */ 3902 ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno); 3903 ++const_labelno; 3904 3905 /* Construct the SYMBOL_REF. Make sure to mark it as belonging to 3906 the constants pool. */ 3907 if (use_object_blocks_p () && targetm.use_blocks_for_constant_p (mode, x)) 3908 { 3909 section *sect = targetm.asm_out.select_rtx_section (mode, x, align); 3910 symbol = create_block_symbol (ggc_strdup (label), 3911 get_block_for_section (sect), -1); 3912 } 3913 else 3914 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label)); 3915 desc->sym = symbol; 3916 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL; 3917 CONSTANT_POOL_ADDRESS_P (symbol) = 1; 3918 SET_SYMBOL_REF_CONSTANT (symbol, desc); 3919 3920 /* Construct the MEM. */ 3921 desc->mem = def = gen_const_mem (mode, symbol); 3922 set_mem_align (def, align); 3923 3924 /* If we're dropping a label to the constant pool, make sure we 3925 don't delete it. */ 3926 if (GET_CODE (x) == LABEL_REF) 3927 LABEL_PRESERVE_P (XEXP (x, 0)) = 1; 3928 3929 return copy_rtx (def); 3930 } 3931 3932 /* Given a constant pool SYMBOL_REF, return the corresponding constant. */ 3933 3934 rtx 3935 get_pool_constant (const_rtx addr) 3936 { 3937 return SYMBOL_REF_CONSTANT (addr)->constant; 3938 } 3939 3940 /* Given a constant pool SYMBOL_REF, return the corresponding constant 3941 and whether it has been output or not. */ 3942 3943 rtx 3944 get_pool_constant_mark (rtx addr, bool *pmarked) 3945 { 3946 class constant_descriptor_rtx *desc; 3947 3948 desc = SYMBOL_REF_CONSTANT (addr); 3949 *pmarked = (desc->mark != 0); 3950 return desc->constant; 3951 } 3952 3953 /* Similar, return the mode. */ 3954 3955 fixed_size_mode 3956 get_pool_mode (const_rtx addr) 3957 { 3958 return SYMBOL_REF_CONSTANT (addr)->mode; 3959 } 3960 3961 /* Return TRUE if and only if the constant pool has no entries. Note 3962 that even entries we might end up choosing not to emit are counted 3963 here, so there is the potential for missed optimizations. 
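   A port might consult this, for instance, when deciding whether setting up a base register for pool references is worthwhile in the current function; whether that pays off is target-specific.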
*/ 3964 3965 bool 3966 constant_pool_empty_p (void) 3967 { 3968 return crtl->varasm.pool->first == NULL; 3969 } 3970 3971 /* Worker function for output_constant_pool_1. Emit assembly for X 3972 in MODE with known alignment ALIGN. */ 3973 3974 static void 3975 output_constant_pool_2 (fixed_size_mode mode, rtx x, unsigned int align) 3976 { 3977 switch (GET_MODE_CLASS (mode)) 3978 { 3979 case MODE_FLOAT: 3980 case MODE_DECIMAL_FLOAT: 3981 { 3982 gcc_assert (CONST_DOUBLE_AS_FLOAT_P (x)); 3983 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), 3984 as_a <scalar_float_mode> (mode), align, false); 3985 break; 3986 } 3987 3988 case MODE_INT: 3989 case MODE_PARTIAL_INT: 3990 case MODE_FRACT: 3991 case MODE_UFRACT: 3992 case MODE_ACCUM: 3993 case MODE_UACCUM: 3994 assemble_integer (x, GET_MODE_SIZE (mode), align, 1); 3995 break; 3996 3997 case MODE_VECTOR_BOOL: 3998 { 3999 gcc_assert (GET_CODE (x) == CONST_VECTOR); 4000 4001 /* Pick the smallest integer mode that contains at least one 4002 whole element. Often this is byte_mode and contains more 4003 than one element. */ 4004 unsigned int nelts = GET_MODE_NUNITS (mode); 4005 unsigned int elt_bits = GET_MODE_BITSIZE (mode) / nelts; 4006 unsigned int int_bits = MAX (elt_bits, BITS_PER_UNIT); 4007 scalar_int_mode int_mode = int_mode_for_size (int_bits, 0).require (); 4008 4009 /* Build the constant up one integer at a time. */ 4010 unsigned int elts_per_int = int_bits / elt_bits; 4011 for (unsigned int i = 0; i < nelts; i += elts_per_int) 4012 { 4013 unsigned HOST_WIDE_INT value = 0; 4014 unsigned int limit = MIN (nelts - i, elts_per_int); 4015 for (unsigned int j = 0; j < limit; ++j) 4016 if (INTVAL (CONST_VECTOR_ELT (x, i + j)) != 0) 4017 value |= 1 << (j * elt_bits); 4018 output_constant_pool_2 (int_mode, gen_int_mode (value, int_mode), 4019 i != 0 ? MIN (align, int_bits) : align); 4020 } 4021 break; 4022 } 4023 case MODE_VECTOR_FLOAT: 4024 case MODE_VECTOR_INT: 4025 case MODE_VECTOR_FRACT: 4026 case MODE_VECTOR_UFRACT: 4027 case MODE_VECTOR_ACCUM: 4028 case MODE_VECTOR_UACCUM: 4029 { 4030 int i, units; 4031 scalar_mode submode = GET_MODE_INNER (mode); 4032 unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode)); 4033 4034 gcc_assert (GET_CODE (x) == CONST_VECTOR); 4035 units = GET_MODE_NUNITS (mode); 4036 4037 for (i = 0; i < units; i++) 4038 { 4039 rtx elt = CONST_VECTOR_ELT (x, i); 4040 output_constant_pool_2 (submode, elt, i ? subalign : align); 4041 } 4042 } 4043 break; 4044 4045 default: 4046 gcc_unreachable (); 4047 } 4048 } 4049 4050 /* Worker function for output_constant_pool. Emit constant DESC, 4051 giving it ALIGN bits of alignment. */ 4052 4053 static void 4054 output_constant_pool_1 (class constant_descriptor_rtx *desc, 4055 unsigned int align) 4056 { 4057 rtx x, tmp; 4058 4059 x = desc->constant; 4060 4061 /* See if X is a LABEL_REF (or a CONST referring to a LABEL_REF) 4062 whose CODE_LABEL has been deleted. This can occur if a jump table 4063 is eliminated by optimization. If so, write a constant of zero 4064 instead. Note that this can also happen by turning the 4065 CODE_LABEL into a NOTE. */ 4066 /* ??? This seems completely and utterly wrong. Certainly it's 4067 not true for NOTE_INSN_DELETED_LABEL, but I disbelieve proper 4068 functioning even with rtx_insn::deleted and friends. 
*/ 4069 4070 tmp = x; 4071 switch (GET_CODE (tmp)) 4072 { 4073 case CONST: 4074 if (GET_CODE (XEXP (tmp, 0)) != PLUS 4075 || GET_CODE (XEXP (XEXP (tmp, 0), 0)) != LABEL_REF) 4076 break; 4077 tmp = XEXP (XEXP (tmp, 0), 0); 4078 /* FALLTHRU */ 4079 4080 case LABEL_REF: 4081 { 4082 rtx_insn *insn = label_ref_label (tmp); 4083 gcc_assert (!insn->deleted ()); 4084 gcc_assert (!NOTE_P (insn) 4085 || NOTE_KIND (insn) != NOTE_INSN_DELETED); 4086 break; 4087 } 4088 4089 default: 4090 break; 4091 } 4092 4093 #ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY 4094 ASM_OUTPUT_SPECIAL_POOL_ENTRY (asm_out_file, x, desc->mode, 4095 align, desc->labelno, done); 4096 #endif 4097 4098 assemble_align (align); 4099 4100 /* Output the label. */ 4101 targetm.asm_out.internal_label (asm_out_file, "LC", desc->labelno); 4102 4103 /* Output the data. 4104 Pass actual alignment value while emitting string constant to asm code 4105 as function 'output_constant_pool_1' explicitly passes the alignment as 1 4106 assuming that the data is already aligned which prevents the generation 4107 of fix-up table entries. */ 4108 output_constant_pool_2 (desc->mode, x, desc->align); 4109 4110 /* Make sure all constants in SECTION_MERGE and not SECTION_STRINGS 4111 sections have proper size. */ 4112 if (align > GET_MODE_BITSIZE (desc->mode) 4113 && in_section 4114 && (in_section->common.flags & SECTION_MERGE)) 4115 assemble_align (align); 4116 4117 #ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY 4118 done: 4119 #endif 4120 return; 4121 } 4122 4123 /* Recompute the offsets of entries in POOL, and the overall size of 4124 POOL. Do this after calling mark_constant_pool to ensure that we 4125 are computing the offset values for the pool which we will actually 4126 emit. */ 4127 4128 static void 4129 recompute_pool_offsets (struct rtx_constant_pool *pool) 4130 { 4131 class constant_descriptor_rtx *desc; 4132 pool->offset = 0; 4133 4134 for (desc = pool->first; desc ; desc = desc->next) 4135 if (desc->mark) 4136 { 4137 /* Recalculate offset. */ 4138 unsigned int align = desc->align; 4139 pool->offset += (align / BITS_PER_UNIT) - 1; 4140 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1); 4141 desc->offset = pool->offset; 4142 pool->offset += GET_MODE_SIZE (desc->mode); 4143 } 4144 } 4145 4146 /* Mark all constants that are referenced by SYMBOL_REFs in X. 4147 Emit referenced deferred strings. */ 4148 4149 static void 4150 mark_constants_in_pattern (rtx insn) 4151 { 4152 subrtx_iterator::array_type array; 4153 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL) 4154 { 4155 const_rtx x = *iter; 4156 if (GET_CODE (x) == SYMBOL_REF) 4157 { 4158 if (CONSTANT_POOL_ADDRESS_P (x)) 4159 { 4160 class constant_descriptor_rtx *desc = SYMBOL_REF_CONSTANT (x); 4161 if (desc->mark == 0) 4162 { 4163 desc->mark = 1; 4164 iter.substitute (desc->constant); 4165 } 4166 } 4167 else if (TREE_CONSTANT_POOL_ADDRESS_P (x)) 4168 { 4169 tree decl = SYMBOL_REF_DECL (x); 4170 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl))) 4171 { 4172 n_deferred_constants--; 4173 output_constant_def_contents (CONST_CAST_RTX (x)); 4174 } 4175 } 4176 } 4177 } 4178 } 4179 4180 /* Look through appropriate parts of INSN, marking all entries in the 4181 constant pool which are actually being used. Entries that are only 4182 referenced by other constants are also marked as used. Emit 4183 deferred strings that are used. */ 4184 4185 static void 4186 mark_constants (rtx_insn *insn) 4187 { 4188 if (!INSN_P (insn)) 4189 return; 4190 4191 /* Insns may appear inside a SEQUENCE. 
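   (On targets with delay slots, for example, a filled branch and the insns in its delay slots are grouped into a single SEQUENCE pattern.)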
Only check the patterns of 4192 insns, not any notes that may be attached. We don't want to mark 4193 a constant just because it happens to appear in a REG_EQUIV note. */ 4194 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn))) 4195 { 4196 int i, n = seq->len (); 4197 for (i = 0; i < n; ++i) 4198 { 4199 rtx subinsn = seq->element (i); 4200 if (INSN_P (subinsn)) 4201 mark_constants_in_pattern (subinsn); 4202 } 4203 } 4204 else 4205 mark_constants_in_pattern (insn); 4206 } 4207 4208 /* Look through the instructions for this function, and mark all the 4209 entries in POOL which are actually being used. Emit deferred constants 4210 which have indeed been used. */ 4211 4212 static void 4213 mark_constant_pool (void) 4214 { 4215 rtx_insn *insn; 4216 4217 if (!crtl->uses_const_pool && n_deferred_constants == 0) 4218 return; 4219 4220 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) 4221 mark_constants (insn); 4222 } 4223 4224 /* Write all the constants in POOL. */ 4225 4226 static void 4227 output_constant_pool_contents (struct rtx_constant_pool *pool) 4228 { 4229 class constant_descriptor_rtx *desc; 4230 4231 for (desc = pool->first; desc ; desc = desc->next) 4232 if (desc->mark) 4233 { 4234 /* If the constant is part of an object_block, make sure that 4235 the constant has been positioned within its block, but do not 4236 write out its definition yet. output_object_blocks will do 4237 that later. */ 4238 if (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym) 4239 && SYMBOL_REF_BLOCK (desc->sym)) 4240 place_block_symbol (desc->sym); 4241 else 4242 { 4243 switch_to_section (targetm.asm_out.select_rtx_section 4244 (desc->mode, desc->constant, desc->align)); 4245 output_constant_pool_1 (desc, desc->align); 4246 } 4247 } 4248 } 4249 4250 /* Mark all constants that are used in the current function, then write 4251 out the function's private constant pool. */ 4252 4253 static void 4254 output_constant_pool (const char *fnname ATTRIBUTE_UNUSED, 4255 tree fndecl ATTRIBUTE_UNUSED) 4256 { 4257 struct rtx_constant_pool *pool = crtl->varasm.pool; 4258 4259 /* It is possible for gcc to call force_const_mem and then to later 4260 discard the instructions which refer to the constant. In such a 4261 case we do not need to output the constant. */ 4262 mark_constant_pool (); 4263 4264 /* Having marked the constant pool entries we'll actually emit, we 4265 now need to rebuild the offset information, which may have become 4266 stale. */ 4267 recompute_pool_offsets (pool); 4268 4269 #ifdef ASM_OUTPUT_POOL_PROLOGUE 4270 ASM_OUTPUT_POOL_PROLOGUE (asm_out_file, fnname, fndecl, pool->offset); 4271 #endif 4272 4273 output_constant_pool_contents (pool); 4274 4275 #ifdef ASM_OUTPUT_POOL_EPILOGUE 4276 ASM_OUTPUT_POOL_EPILOGUE (asm_out_file, fnname, fndecl, pool->offset); 4277 #endif 4278 } 4279 4280 /* Write the contents of the shared constant pool. */ 4281 4282 void 4283 output_shared_constant_pool (void) 4284 { 4285 output_constant_pool_contents (shared_constant_pool); 4286 } 4287 4288 /* Determine what kind of relocations EXP may need. */ 4289 4290 int 4291 compute_reloc_for_constant (tree exp) 4292 { 4293 int reloc = 0, reloc2; 4294 tree tem; 4295 4296 switch (TREE_CODE (exp)) 4297 { 4298 case ADDR_EXPR: 4299 case FDESC_EXPR: 4300 /* Go inside any operations that get_inner_reference can handle and see 4301 if what's inside is a constant: no need to do anything here for 4302 addresses of variables or functions. 
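   The result is a bit mask: 1 records a relocation against a symbol that binds locally, 2 one against a symbol that may be preempted, and 0 means the value is absolute.  The address of a file-local variable, for example, yields 1, the address of a preemptible global yields 2, and the difference of two locally-binding addresses folds back to 0.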
*/ 4303 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem); 4304 tem = TREE_OPERAND (tem, 0)) 4305 ; 4306 4307 if (TREE_CODE (tem) == MEM_REF 4308 && TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR) 4309 { 4310 reloc = compute_reloc_for_constant (TREE_OPERAND (tem, 0)); 4311 break; 4312 } 4313 4314 if (!targetm.binds_local_p (tem)) 4315 reloc |= 2; 4316 else 4317 reloc |= 1; 4318 break; 4319 4320 case PLUS_EXPR: 4321 case POINTER_PLUS_EXPR: 4322 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0)); 4323 reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1)); 4324 break; 4325 4326 case MINUS_EXPR: 4327 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0)); 4328 reloc2 = compute_reloc_for_constant (TREE_OPERAND (exp, 1)); 4329 /* The difference of two local labels is computable at link time. */ 4330 if (reloc == 1 && reloc2 == 1) 4331 reloc = 0; 4332 else 4333 reloc |= reloc2; 4334 break; 4335 4336 CASE_CONVERT: 4337 case VIEW_CONVERT_EXPR: 4338 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0)); 4339 break; 4340 4341 case CONSTRUCTOR: 4342 { 4343 unsigned HOST_WIDE_INT idx; 4344 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem) 4345 if (tem != 0) 4346 reloc |= compute_reloc_for_constant (tem); 4347 } 4348 break; 4349 4350 default: 4351 break; 4352 } 4353 return reloc; 4354 } 4355 4356 /* Find all the constants whose addresses are referenced inside of EXP, 4357 and make sure assembler code with a label has been output for each one. 4358 Indicate whether an ADDR_EXPR has been encountered. */ 4359 4360 static void 4361 output_addressed_constants (tree exp) 4362 { 4363 tree tem; 4364 4365 switch (TREE_CODE (exp)) 4366 { 4367 case ADDR_EXPR: 4368 case FDESC_EXPR: 4369 /* Go inside any operations that get_inner_reference can handle and see 4370 if what's inside is a constant: no need to do anything here for 4371 addresses of variables or functions. */ 4372 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem); 4373 tem = TREE_OPERAND (tem, 0)) 4374 ; 4375 4376 /* If we have an initialized CONST_DECL, retrieve the initializer. */ 4377 if (TREE_CODE (tem) == CONST_DECL && DECL_INITIAL (tem)) 4378 tem = DECL_INITIAL (tem); 4379 4380 if (CONSTANT_CLASS_P (tem) || TREE_CODE (tem) == CONSTRUCTOR) 4381 output_constant_def (tem, 0); 4382 4383 if (TREE_CODE (tem) == MEM_REF) 4384 output_addressed_constants (TREE_OPERAND (tem, 0)); 4385 break; 4386 4387 case PLUS_EXPR: 4388 case POINTER_PLUS_EXPR: 4389 case MINUS_EXPR: 4390 output_addressed_constants (TREE_OPERAND (exp, 1)); 4391 gcc_fallthrough (); 4392 4393 CASE_CONVERT: 4394 case VIEW_CONVERT_EXPR: 4395 output_addressed_constants (TREE_OPERAND (exp, 0)); 4396 break; 4397 4398 case CONSTRUCTOR: 4399 { 4400 unsigned HOST_WIDE_INT idx; 4401 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem) 4402 if (tem != 0) 4403 output_addressed_constants (tem); 4404 } 4405 break; 4406 4407 default: 4408 break; 4409 } 4410 } 4411 4412 /* Whether a constructor CTOR is a valid static constant initializer if all 4413 its elements are. This used to be internal to initializer_constant_valid_p 4414 and has been exposed to let other functions like categorize_ctor_elements 4415 evaluate the property while walking a constructor for other purposes. 
*/ 4416 4417 bool 4418 constructor_static_from_elts_p (const_tree ctor) 4419 { 4420 return (TREE_CONSTANT (ctor) 4421 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE 4422 || TREE_CODE (TREE_TYPE (ctor)) == RECORD_TYPE 4423 || TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)); 4424 } 4425 4426 static tree initializer_constant_valid_p_1 (tree value, tree endtype, 4427 tree *cache); 4428 4429 /* A subroutine of initializer_constant_valid_p. VALUE is a MINUS_EXPR, 4430 PLUS_EXPR or POINTER_PLUS_EXPR. This looks for cases of VALUE 4431 which are valid when ENDTYPE is an integer of any size; in 4432 particular, this does not accept a pointer minus a constant. This 4433 returns null_pointer_node if the VALUE is an absolute constant 4434 which can be used to initialize a static variable. Otherwise it 4435 returns NULL. */ 4436 4437 static tree 4438 narrowing_initializer_constant_valid_p (tree value, tree endtype, tree *cache) 4439 { 4440 tree op0, op1; 4441 4442 if (!INTEGRAL_TYPE_P (endtype)) 4443 return NULL_TREE; 4444 4445 op0 = TREE_OPERAND (value, 0); 4446 op1 = TREE_OPERAND (value, 1); 4447 4448 /* Like STRIP_NOPS except allow the operand mode to widen. This 4449 works around a feature of fold that simplifies (int)(p1 - p2) to 4450 ((int)p1 - (int)p2) under the theory that the narrower operation 4451 is cheaper. */ 4452 4453 while (CONVERT_EXPR_P (op0) 4454 || TREE_CODE (op0) == NON_LVALUE_EXPR) 4455 { 4456 tree inner = TREE_OPERAND (op0, 0); 4457 if (inner == error_mark_node 4458 || ! INTEGRAL_TYPE_P (TREE_TYPE (op0)) 4459 || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (op0))) 4460 || ! INTEGRAL_TYPE_P (TREE_TYPE (inner)) 4461 || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (inner))) 4462 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op0))) 4463 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner))))) 4464 break; 4465 op0 = inner; 4466 } 4467 4468 while (CONVERT_EXPR_P (op1) 4469 || TREE_CODE (op1) == NON_LVALUE_EXPR) 4470 { 4471 tree inner = TREE_OPERAND (op1, 0); 4472 if (inner == error_mark_node 4473 || ! INTEGRAL_TYPE_P (TREE_TYPE (op1)) 4474 || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (op1))) 4475 || ! INTEGRAL_TYPE_P (TREE_TYPE (inner)) 4476 || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (inner))) 4477 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op1))) 4478 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner))))) 4479 break; 4480 op1 = inner; 4481 } 4482 4483 op0 = initializer_constant_valid_p_1 (op0, endtype, cache); 4484 if (!op0) 4485 return NULL_TREE; 4486 4487 op1 = initializer_constant_valid_p_1 (op1, endtype, 4488 cache ? cache + 2 : NULL); 4489 /* Both initializers must be known. */ 4490 if (op1) 4491 { 4492 if (op0 == op1 4493 && (op0 == null_pointer_node 4494 || TREE_CODE (value) == MINUS_EXPR)) 4495 return null_pointer_node; 4496 4497 /* Support differences between labels. */ 4498 if (TREE_CODE (op0) == LABEL_DECL 4499 && TREE_CODE (op1) == LABEL_DECL) 4500 return null_pointer_node; 4501 4502 if (TREE_CODE (op0) == STRING_CST 4503 && TREE_CODE (op1) == STRING_CST 4504 && operand_equal_p (op0, op1, 1)) 4505 return null_pointer_node; 4506 } 4507 4508 return NULL_TREE; 4509 } 4510 4511 /* Helper function of initializer_constant_valid_p. 4512 Return nonzero if VALUE is a valid constant-valued expression 4513 for use in initializing a static variable; one that can be an 4514 element of a "constant" initializer. 4515 4516 Return null_pointer_node if the value is absolute; 4517 if it is relocatable, return the variable that determines the relocation. 
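   For example, `42' and string literals yield null_pointer_node, while `&foo' or `&foo + 8' (for a `foo' in static storage) yields the declaration of `foo'.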
4518 We assume that VALUE has been folded as much as possible; 4519 therefore, we do not need to check for such things as 4520 arithmetic-combinations of integers. 4521 4522 Use CACHE (pointer to 2 tree values) for caching if non-NULL. */ 4523 4524 static tree 4525 initializer_constant_valid_p_1 (tree value, tree endtype, tree *cache) 4526 { 4527 tree ret; 4528 4529 switch (TREE_CODE (value)) 4530 { 4531 case CONSTRUCTOR: 4532 if (constructor_static_from_elts_p (value)) 4533 { 4534 unsigned HOST_WIDE_INT idx; 4535 tree elt; 4536 bool absolute = true; 4537 4538 if (cache && cache[0] == value) 4539 return cache[1]; 4540 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt) 4541 { 4542 tree reloc; 4543 reloc = initializer_constant_valid_p_1 (elt, TREE_TYPE (elt), 4544 NULL); 4545 if (!reloc 4546 /* An absolute value is required with reverse SSO. */ 4547 || (reloc != null_pointer_node 4548 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (value)) 4549 && !AGGREGATE_TYPE_P (TREE_TYPE (elt)))) 4550 { 4551 if (cache) 4552 { 4553 cache[0] = value; 4554 cache[1] = NULL_TREE; 4555 } 4556 return NULL_TREE; 4557 } 4558 if (reloc != null_pointer_node) 4559 absolute = false; 4560 } 4561 /* For a non-absolute relocation, there is no single 4562 variable that can be "the variable that determines the 4563 relocation." */ 4564 if (cache) 4565 { 4566 cache[0] = value; 4567 cache[1] = absolute ? null_pointer_node : error_mark_node; 4568 } 4569 return absolute ? null_pointer_node : error_mark_node; 4570 } 4571 4572 return TREE_STATIC (value) ? null_pointer_node : NULL_TREE; 4573 4574 case INTEGER_CST: 4575 case VECTOR_CST: 4576 case REAL_CST: 4577 case FIXED_CST: 4578 case STRING_CST: 4579 case COMPLEX_CST: 4580 return null_pointer_node; 4581 4582 case ADDR_EXPR: 4583 case FDESC_EXPR: 4584 { 4585 tree op0 = staticp (TREE_OPERAND (value, 0)); 4586 if (op0) 4587 { 4588 /* "&(*a).f" is like unto pointer arithmetic. If "a" turns out 4589 to be a constant, this is old-skool offsetof-like nonsense. */ 4590 if (TREE_CODE (op0) == INDIRECT_REF 4591 && TREE_CONSTANT (TREE_OPERAND (op0, 0))) 4592 return null_pointer_node; 4593 /* Taking the address of a nested function involves a trampoline, 4594 unless we don't need or want one. */ 4595 if (TREE_CODE (op0) == FUNCTION_DECL 4596 && DECL_STATIC_CHAIN (op0) 4597 && !TREE_NO_TRAMPOLINE (value)) 4598 return NULL_TREE; 4599 /* "&{...}" requires a temporary to hold the constructed 4600 object. */ 4601 if (TREE_CODE (op0) == CONSTRUCTOR) 4602 return NULL_TREE; 4603 } 4604 return op0; 4605 } 4606 4607 case NON_LVALUE_EXPR: 4608 return initializer_constant_valid_p_1 (TREE_OPERAND (value, 0), 4609 endtype, cache); 4610 4611 case VIEW_CONVERT_EXPR: 4612 { 4613 tree src = TREE_OPERAND (value, 0); 4614 tree src_type = TREE_TYPE (src); 4615 tree dest_type = TREE_TYPE (value); 4616 4617 /* Allow view-conversions from aggregate to non-aggregate type only 4618 if the bit pattern is fully preserved afterwards; otherwise, the 4619 RTL expander won't be able to apply a subsequent transformation 4620 to the underlying constructor. */ 4621 if (AGGREGATE_TYPE_P (src_type) && !AGGREGATE_TYPE_P (dest_type)) 4622 { 4623 if (TYPE_MODE (endtype) == TYPE_MODE (dest_type)) 4624 return initializer_constant_valid_p_1 (src, endtype, cache); 4625 else 4626 return NULL_TREE; 4627 } 4628 4629 /* Allow all other kinds of view-conversion. 
*/ 4630 return initializer_constant_valid_p_1 (src, endtype, cache); 4631 } 4632 4633 CASE_CONVERT: 4634 { 4635 tree src = TREE_OPERAND (value, 0); 4636 tree src_type = TREE_TYPE (src); 4637 tree dest_type = TREE_TYPE (value); 4638 4639 /* Allow conversions between pointer types, floating-point 4640 types, and offset types. */ 4641 if ((POINTER_TYPE_P (dest_type) && POINTER_TYPE_P (src_type)) 4642 || (FLOAT_TYPE_P (dest_type) && FLOAT_TYPE_P (src_type)) 4643 || (TREE_CODE (dest_type) == OFFSET_TYPE 4644 && TREE_CODE (src_type) == OFFSET_TYPE)) 4645 return initializer_constant_valid_p_1 (src, endtype, cache); 4646 4647 /* Allow length-preserving conversions between integer types. */ 4648 if (INTEGRAL_TYPE_P (dest_type) && INTEGRAL_TYPE_P (src_type) 4649 && (TYPE_PRECISION (dest_type) == TYPE_PRECISION (src_type))) 4650 return initializer_constant_valid_p_1 (src, endtype, cache); 4651 4652 /* Allow conversions between other integer types only if 4653 explicit value. Don't allow sign-extension to a type larger 4654 than word and pointer, there aren't relocations that would 4655 allow to sign extend it to a wider type. */ 4656 if (INTEGRAL_TYPE_P (dest_type) 4657 && INTEGRAL_TYPE_P (src_type) 4658 && (TYPE_UNSIGNED (src_type) 4659 || TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type) 4660 || TYPE_PRECISION (dest_type) <= BITS_PER_WORD 4661 || TYPE_PRECISION (dest_type) <= POINTER_SIZE)) 4662 { 4663 tree inner = initializer_constant_valid_p_1 (src, endtype, cache); 4664 if (inner == null_pointer_node) 4665 return null_pointer_node; 4666 break; 4667 } 4668 4669 /* Allow (int) &foo provided int is as wide as a pointer. */ 4670 if (INTEGRAL_TYPE_P (dest_type) && POINTER_TYPE_P (src_type) 4671 && (TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type))) 4672 return initializer_constant_valid_p_1 (src, endtype, cache); 4673 4674 /* Likewise conversions from int to pointers, but also allow 4675 conversions from 0. */ 4676 if ((POINTER_TYPE_P (dest_type) 4677 || TREE_CODE (dest_type) == OFFSET_TYPE) 4678 && INTEGRAL_TYPE_P (src_type)) 4679 { 4680 if (TREE_CODE (src) == INTEGER_CST 4681 && TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type)) 4682 return null_pointer_node; 4683 if (integer_zerop (src)) 4684 return null_pointer_node; 4685 else if (TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type)) 4686 return initializer_constant_valid_p_1 (src, endtype, cache); 4687 } 4688 4689 /* Allow conversions to struct or union types if the value 4690 inside is okay. */ 4691 if (TREE_CODE (dest_type) == RECORD_TYPE 4692 || TREE_CODE (dest_type) == UNION_TYPE) 4693 return initializer_constant_valid_p_1 (src, endtype, cache); 4694 } 4695 break; 4696 4697 case POINTER_PLUS_EXPR: 4698 case PLUS_EXPR: 4699 /* Any valid floating-point constants will have been folded by now; 4700 with -frounding-math we hit this with addition of two constants. */ 4701 if (TREE_CODE (endtype) == REAL_TYPE) 4702 return NULL_TREE; 4703 if (cache && cache[0] == value) 4704 return cache[1]; 4705 if (! INTEGRAL_TYPE_P (endtype) 4706 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value))) 4707 { 4708 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE }; 4709 tree valid0 4710 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0), 4711 endtype, ncache); 4712 tree valid1 4713 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1), 4714 endtype, ncache + 2); 4715 /* If either term is absolute, use the other term's relocation. 
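   For `&x + 16', say, the offset is absolute, so the whole expression carries the relocation of `&x'.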
*/ 4716 if (valid0 == null_pointer_node) 4717 ret = valid1; 4718 else if (valid1 == null_pointer_node) 4719 ret = valid0; 4720 /* Support narrowing pointer differences. */ 4721 else 4722 ret = narrowing_initializer_constant_valid_p (value, endtype, 4723 ncache); 4724 } 4725 else 4726 /* Support narrowing pointer differences. */ 4727 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL); 4728 if (cache) 4729 { 4730 cache[0] = value; 4731 cache[1] = ret; 4732 } 4733 return ret; 4734 4735 case POINTER_DIFF_EXPR: 4736 case MINUS_EXPR: 4737 if (TREE_CODE (endtype) == REAL_TYPE) 4738 return NULL_TREE; 4739 if (cache && cache[0] == value) 4740 return cache[1]; 4741 if (! INTEGRAL_TYPE_P (endtype) 4742 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value))) 4743 { 4744 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE }; 4745 tree valid0 4746 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0), 4747 endtype, ncache); 4748 tree valid1 4749 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1), 4750 endtype, ncache + 2); 4751 /* Win if second argument is absolute. */ 4752 if (valid1 == null_pointer_node) 4753 ret = valid0; 4754 /* Win if both arguments have the same relocation. 4755 Then the value is absolute. */ 4756 else if (valid0 == valid1 && valid0 != 0) 4757 ret = null_pointer_node; 4758 /* Since GCC guarantees that string constants are unique in the 4759 generated code, a subtraction between two copies of the same 4760 constant string is absolute. */ 4761 else if (valid0 && TREE_CODE (valid0) == STRING_CST 4762 && valid1 && TREE_CODE (valid1) == STRING_CST 4763 && operand_equal_p (valid0, valid1, 1)) 4764 ret = null_pointer_node; 4765 /* Support narrowing differences. */ 4766 else 4767 ret = narrowing_initializer_constant_valid_p (value, endtype, 4768 ncache); 4769 } 4770 else 4771 /* Support narrowing differences. */ 4772 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL); 4773 if (cache) 4774 { 4775 cache[0] = value; 4776 cache[1] = ret; 4777 } 4778 return ret; 4779 4780 default: 4781 break; 4782 } 4783 4784 return NULL_TREE; 4785 } 4786 4787 /* Return nonzero if VALUE is a valid constant-valued expression 4788 for use in initializing a static variable; one that can be an 4789 element of a "constant" initializer. 4790 4791 Return null_pointer_node if the value is absolute; 4792 if it is relocatable, return the variable that determines the relocation. 4793 We assume that VALUE has been folded as much as possible; 4794 therefore, we do not need to check for such things as 4795 arithmetic-combinations of integers. */ 4796 tree 4797 initializer_constant_valid_p (tree value, tree endtype, bool reverse) 4798 { 4799 tree reloc = initializer_constant_valid_p_1 (value, endtype, NULL); 4800 4801 /* An absolute value is required with reverse storage order. */ 4802 if (reloc 4803 && reloc != null_pointer_node 4804 && reverse 4805 && !AGGREGATE_TYPE_P (endtype) 4806 && !VECTOR_TYPE_P (endtype)) 4807 reloc = NULL_TREE; 4808 4809 return reloc; 4810 } 4811 4812 /* Return true if VALUE is a valid constant-valued expression 4813 for use in initializing a static bit-field; one that can be 4814 an element of a "constant" initializer. */ 4815 4816 bool 4817 initializer_constant_valid_for_bitfield_p (tree value) 4818 { 4819 /* For bitfields we support integer constants or possibly nested aggregates 4820 of such. 
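   A nested brace list built entirely from INTEGER_CSTs is acceptable, for example, whereas an address constant is not: relocations cannot be applied to part of a byte.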
*/ 4821 switch (TREE_CODE (value)) 4822 { 4823 case CONSTRUCTOR: 4824 { 4825 unsigned HOST_WIDE_INT idx; 4826 tree elt; 4827 4828 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt) 4829 if (!initializer_constant_valid_for_bitfield_p (elt)) 4830 return false; 4831 return true; 4832 } 4833 4834 case INTEGER_CST: 4835 case REAL_CST: 4836 return true; 4837 4838 case VIEW_CONVERT_EXPR: 4839 case NON_LVALUE_EXPR: 4840 return 4841 initializer_constant_valid_for_bitfield_p (TREE_OPERAND (value, 0)); 4842 4843 default: 4844 break; 4845 } 4846 4847 return false; 4848 } 4849 4850 /* Check if a STRING_CST fits into the field. 4851 Tolerate only the case when the NUL termination 4852 does not fit into the field. */ 4853 4854 static bool 4855 check_string_literal (tree string, unsigned HOST_WIDE_INT size) 4856 { 4857 tree type = TREE_TYPE (string); 4858 tree eltype = TREE_TYPE (type); 4859 unsigned HOST_WIDE_INT elts = tree_to_uhwi (TYPE_SIZE_UNIT (eltype)); 4860 unsigned HOST_WIDE_INT mem_size = tree_to_uhwi (TYPE_SIZE_UNIT (type)); 4861 int len = TREE_STRING_LENGTH (string); 4862 4863 if (elts != 1 && elts != 2 && elts != 4) 4864 return false; 4865 if (len < 0 || len % elts != 0) 4866 return false; 4867 if (size < (unsigned)len) 4868 return false; 4869 if (mem_size != size) 4870 return false; 4871 return true; 4872 } 4873 4874 /* output_constructor outer state of relevance in recursive calls, typically 4875 for nested aggregate bitfields. */ 4876 4877 struct oc_outer_state { 4878 unsigned int bit_offset; /* current position in ... */ 4879 int byte; /* ... the outer byte buffer. */ 4880 }; 4881 4882 static unsigned HOST_WIDE_INT 4883 output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int, bool, 4884 oc_outer_state *); 4885 4886 /* Output assembler code for constant EXP, with no label. 4887 This includes the pseudo-op such as ".int" or ".byte", and a newline. 4888 Assumes output_addressed_constants has been done on EXP already. 4889 4890 Generate at least SIZE bytes of assembler data, padding at the end 4891 with zeros if necessary. SIZE must always be specified. The returned 4892 value is the actual number of bytes of assembler data generated, which 4893 may be bigger than SIZE if the object contains a variable length field. 4894 4895 SIZE is important for structure constructors, 4896 since trailing members may have been omitted from the constructor. 4897 It is also important for initialization of arrays from string constants 4898 since the full length of the string constant might not be wanted. 4899 It is also needed for initialization of unions, where the initializer's 4900 type is just one member, and that may not be as long as the union. 4901 4902 There a case in which we would fail to output exactly SIZE bytes: 4903 for a structure constructor that wants to produce more than SIZE bytes. 4904 But such constructors will never be generated for any possible input. 4905 4906 ALIGN is the alignment of the data in bits. 4907 4908 If REVERSE is true, EXP is output in reverse storage order. */ 4909 4910 static unsigned HOST_WIDE_INT 4911 output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align, 4912 bool reverse, bool merge_strings) 4913 { 4914 enum tree_code code; 4915 unsigned HOST_WIDE_INT thissize; 4916 rtx cst; 4917 4918 if (size == 0 || flag_syntax_only) 4919 return size; 4920 4921 /* See if we're trying to initialize a pointer in a non-default mode 4922 to the address of some declaration somewhere. 
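   This can happen, for example, with named address spaces whose pointer mode differs from the default one.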
If the target says 4923 the mode is valid for pointers, assume the target has a way of 4924 resolving it. */ 4925 if (TREE_CODE (exp) == NOP_EXPR 4926 && POINTER_TYPE_P (TREE_TYPE (exp)) 4927 && targetm.addr_space.valid_pointer_mode 4928 (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)), 4929 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))))) 4930 { 4931 tree saved_type = TREE_TYPE (exp); 4932 4933 /* Peel off any intermediate conversions-to-pointer for valid 4934 pointer modes. */ 4935 while (TREE_CODE (exp) == NOP_EXPR 4936 && POINTER_TYPE_P (TREE_TYPE (exp)) 4937 && targetm.addr_space.valid_pointer_mode 4938 (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)), 4939 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))))) 4940 exp = TREE_OPERAND (exp, 0); 4941 4942 /* If what we're left with is the address of something, we can 4943 convert the address to the final type and output it that 4944 way. */ 4945 if (TREE_CODE (exp) == ADDR_EXPR) 4946 exp = build1 (ADDR_EXPR, saved_type, TREE_OPERAND (exp, 0)); 4947 /* Likewise for constant ints. */ 4948 else if (TREE_CODE (exp) == INTEGER_CST) 4949 exp = fold_convert (saved_type, exp); 4950 4951 } 4952 4953 /* Eliminate any conversions since we'll be outputting the underlying 4954 constant. */ 4955 while (CONVERT_EXPR_P (exp) 4956 || TREE_CODE (exp) == NON_LVALUE_EXPR 4957 || TREE_CODE (exp) == VIEW_CONVERT_EXPR) 4958 { 4959 HOST_WIDE_INT type_size = int_size_in_bytes (TREE_TYPE (exp)); 4960 HOST_WIDE_INT op_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))); 4961 4962 /* Make sure eliminating the conversion is really a no-op, except with 4963 VIEW_CONVERT_EXPRs to allow for wild Ada unchecked conversions and 4964 union types to allow for Ada unchecked unions. */ 4965 if (type_size > op_size 4966 && TREE_CODE (exp) != VIEW_CONVERT_EXPR 4967 && TREE_CODE (TREE_TYPE (exp)) != UNION_TYPE) 4968 /* Keep the conversion. */ 4969 break; 4970 else 4971 exp = TREE_OPERAND (exp, 0); 4972 } 4973 4974 code = TREE_CODE (TREE_TYPE (exp)); 4975 thissize = int_size_in_bytes (TREE_TYPE (exp)); 4976 4977 /* Allow a constructor with no elements for any data type. 4978 This means to fill the space with zeros. */ 4979 if (TREE_CODE (exp) == CONSTRUCTOR 4980 && vec_safe_is_empty (CONSTRUCTOR_ELTS (exp))) 4981 { 4982 assemble_zeros (size); 4983 return size; 4984 } 4985 4986 if (TREE_CODE (exp) == FDESC_EXPR) 4987 { 4988 #ifdef ASM_OUTPUT_FDESC 4989 HOST_WIDE_INT part = tree_to_shwi (TREE_OPERAND (exp, 1)); 4990 tree decl = TREE_OPERAND (exp, 0); 4991 ASM_OUTPUT_FDESC (asm_out_file, decl, part); 4992 #else 4993 gcc_unreachable (); 4994 #endif 4995 return size; 4996 } 4997 4998 /* Now output the underlying data. If we've handling the padding, return. 4999 Otherwise, break and ensure SIZE is the size written. 
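   Cases that merely break fall through to the tail of this function, which pads with zeros out to SIZE; an initializer occupying 5 bytes in an 8-byte field, for example, is followed by 3 zero bytes.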
*/ 5000 switch (code) 5001 { 5002 case BOOLEAN_TYPE: 5003 case INTEGER_TYPE: 5004 case ENUMERAL_TYPE: 5005 case POINTER_TYPE: 5006 case REFERENCE_TYPE: 5007 case OFFSET_TYPE: 5008 case FIXED_POINT_TYPE: 5009 case NULLPTR_TYPE: 5010 cst = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER); 5011 if (reverse) 5012 cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst); 5013 if (!assemble_integer (cst, MIN (size, thissize), align, 0)) 5014 error ("initializer for integer/fixed-point value is too complicated"); 5015 break; 5016 5017 case REAL_TYPE: 5018 if (TREE_CODE (exp) != REAL_CST) 5019 error ("initializer for floating value is not a floating constant"); 5020 else 5021 assemble_real (TREE_REAL_CST (exp), 5022 SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (exp)), 5023 align, reverse); 5024 break; 5025 5026 case COMPLEX_TYPE: 5027 output_constant (TREE_REALPART (exp), thissize / 2, align, 5028 reverse, false); 5029 output_constant (TREE_IMAGPART (exp), thissize / 2, 5030 min_align (align, BITS_PER_UNIT * (thissize / 2)), 5031 reverse, false); 5032 break; 5033 5034 case ARRAY_TYPE: 5035 case VECTOR_TYPE: 5036 switch (TREE_CODE (exp)) 5037 { 5038 case CONSTRUCTOR: 5039 return output_constructor (exp, size, align, reverse, NULL); 5040 case STRING_CST: 5041 thissize = (unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp); 5042 if (merge_strings 5043 && (thissize == 0 5044 || TREE_STRING_POINTER (exp) [thissize - 1] != '\0')) 5045 thissize++; 5046 gcc_checking_assert (check_string_literal (exp, size)); 5047 assemble_string (TREE_STRING_POINTER (exp), thissize); 5048 break; 5049 case VECTOR_CST: 5050 { 5051 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp))); 5052 unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner)); 5053 int elt_size = GET_MODE_SIZE (inner); 5054 output_constant (VECTOR_CST_ELT (exp, 0), elt_size, align, 5055 reverse, false); 5056 thissize = elt_size; 5057 /* Static constants must have a fixed size. */ 5058 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant (); 5059 for (unsigned int i = 1; i < nunits; i++) 5060 { 5061 output_constant (VECTOR_CST_ELT (exp, i), elt_size, nalign, 5062 reverse, false); 5063 thissize += elt_size; 5064 } 5065 break; 5066 } 5067 default: 5068 gcc_unreachable (); 5069 } 5070 break; 5071 5072 case RECORD_TYPE: 5073 case UNION_TYPE: 5074 gcc_assert (TREE_CODE (exp) == CONSTRUCTOR); 5075 return output_constructor (exp, size, align, reverse, NULL); 5076 5077 case ERROR_MARK: 5078 return 0; 5079 5080 default: 5081 gcc_unreachable (); 5082 } 5083 5084 if (size > thissize) 5085 assemble_zeros (size - thissize); 5086 5087 return size; 5088 } 5089 5090 /* Subroutine of output_constructor, used for computing the size of 5091 arrays of unspecified length. VAL must be a CONSTRUCTOR of an array 5092 type with an unspecified upper bound. */ 5093 5094 static unsigned HOST_WIDE_INT 5095 array_size_for_constructor (tree val) 5096 { 5097 tree max_index; 5098 unsigned HOST_WIDE_INT cnt; 5099 tree index, value, tmp; 5100 offset_int i; 5101 5102 /* This code used to attempt to handle string constants that are not 5103 arrays of single-bytes, but nothing else does, so there's no point in 5104 doing it here. 
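   For other constructors the size is the element count, max_index - min_index + 1, scaled by the element size; `int a[] = { [0] = 1, [7] = 2 }', for example, gives 8 elements and hence 32 bytes on a target with 4-byte int.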
*/ 5105 if (TREE_CODE (val) == STRING_CST) 5106 return TREE_STRING_LENGTH (val); 5107 5108 max_index = NULL_TREE; 5109 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (val), cnt, index, value) 5110 { 5111 if (TREE_CODE (index) == RANGE_EXPR) 5112 index = TREE_OPERAND (index, 1); 5113 if (max_index == NULL_TREE || tree_int_cst_lt (max_index, index)) 5114 max_index = index; 5115 } 5116 5117 if (max_index == NULL_TREE) 5118 return 0; 5119 5120 /* Compute the total number of array elements. */ 5121 tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val))); 5122 i = wi::to_offset (max_index) - wi::to_offset (tmp) + 1; 5123 5124 /* Multiply by the array element unit size to find number of bytes. */ 5125 i *= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val)))); 5126 5127 gcc_assert (wi::fits_uhwi_p (i)); 5128 return i.to_uhwi (); 5129 } 5130 5131 /* Other datastructures + helpers for output_constructor. */ 5132 5133 /* output_constructor local state to support interaction with helpers. */ 5134 5135 struct oc_local_state { 5136 5137 /* Received arguments. */ 5138 tree exp; /* Constructor expression. */ 5139 tree type; /* Type of constructor expression. */ 5140 unsigned HOST_WIDE_INT size; /* # bytes to output - pad if necessary. */ 5141 unsigned int align; /* Known initial alignment. */ 5142 tree min_index; /* Lower bound if specified for an array. */ 5143 5144 /* Output processing state. */ 5145 HOST_WIDE_INT total_bytes; /* # bytes output so far / current position. */ 5146 int byte; /* Part of a bitfield byte yet to be output. */ 5147 int last_relative_index; /* Implicit or explicit index of the last 5148 array element output within a bitfield. */ 5149 bool byte_buffer_in_use; /* Whether BYTE is in use. */ 5150 bool reverse; /* Whether reverse storage order is in use. */ 5151 5152 /* Current element. */ 5153 tree field; /* Current field decl in a record. */ 5154 tree val; /* Current element value. */ 5155 tree index; /* Current element index. */ 5156 5157 }; 5158 5159 /* Helper for output_constructor. From the current LOCAL state, output a 5160 RANGE_EXPR element. */ 5161 5162 static void 5163 output_constructor_array_range (oc_local_state *local) 5164 { 5165 /* Perform the index calculation in modulo arithmetic but 5166 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs 5167 but we are using an unsigned sizetype. */ 5168 unsigned prec = TYPE_PRECISION (sizetype); 5169 offset_int idx = wi::sext (wi::to_offset (TREE_OPERAND (local->index, 0)) 5170 - wi::to_offset (local->min_index), prec); 5171 tree valtype = TREE_TYPE (local->val); 5172 HOST_WIDE_INT fieldpos 5173 = (idx * wi::to_offset (TYPE_SIZE_UNIT (valtype))).to_short_addr (); 5174 5175 /* Advance to offset of this element. */ 5176 if (fieldpos > local->total_bytes) 5177 { 5178 assemble_zeros (fieldpos - local->total_bytes); 5179 local->total_bytes = fieldpos; 5180 } 5181 else 5182 /* Must not go backwards. */ 5183 gcc_assert (fieldpos == local->total_bytes); 5184 5185 unsigned HOST_WIDE_INT fieldsize 5186 = int_size_in_bytes (TREE_TYPE (local->type)); 5187 5188 HOST_WIDE_INT lo_index 5189 = tree_to_shwi (TREE_OPERAND (local->index, 0)); 5190 HOST_WIDE_INT hi_index 5191 = tree_to_shwi (TREE_OPERAND (local->index, 1)); 5192 HOST_WIDE_INT index; 5193 5194 unsigned int align2 5195 = min_align (local->align, fieldsize * BITS_PER_UNIT); 5196 5197 for (index = lo_index; index <= hi_index; index++) 5198 { 5199 /* Output the element's initial value. 
*/ 5200 if (local->val == NULL_TREE) 5201 assemble_zeros (fieldsize); 5202 else 5203 fieldsize = output_constant (local->val, fieldsize, align2, 5204 local->reverse, false); 5205 5206 /* Count its size. */ 5207 local->total_bytes += fieldsize; 5208 } 5209 } 5210 5211 /* Helper for output_constructor. From the current LOCAL state, output a 5212 field element that is not true bitfield or part of an outer one. */ 5213 5214 static void 5215 output_constructor_regular_field (oc_local_state *local) 5216 { 5217 /* Field size and position. Since this structure is static, we know the 5218 positions are constant. */ 5219 unsigned HOST_WIDE_INT fieldsize; 5220 HOST_WIDE_INT fieldpos; 5221 5222 unsigned int align2; 5223 5224 /* Output any buffered-up bit-fields preceding this element. */ 5225 if (local->byte_buffer_in_use) 5226 { 5227 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1); 5228 local->total_bytes++; 5229 local->byte_buffer_in_use = false; 5230 } 5231 5232 if (local->index != NULL_TREE) 5233 { 5234 /* Perform the index calculation in modulo arithmetic but 5235 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs 5236 but we are using an unsigned sizetype. */ 5237 unsigned prec = TYPE_PRECISION (sizetype); 5238 offset_int idx = wi::sext (wi::to_offset (local->index) 5239 - wi::to_offset (local->min_index), prec); 5240 fieldpos = (idx * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (local->val)))) 5241 .to_short_addr (); 5242 } 5243 else if (local->field != NULL_TREE) 5244 fieldpos = int_byte_position (local->field); 5245 else 5246 fieldpos = 0; 5247 5248 /* Advance to offset of this element. 5249 Note no alignment needed in an array, since that is guaranteed 5250 if each element has the proper size. */ 5251 if (local->field != NULL_TREE || local->index != NULL_TREE) 5252 { 5253 if (fieldpos > local->total_bytes) 5254 { 5255 assemble_zeros (fieldpos - local->total_bytes); 5256 local->total_bytes = fieldpos; 5257 } 5258 else 5259 /* Must not go backwards. */ 5260 gcc_assert (fieldpos == local->total_bytes); 5261 } 5262 5263 /* Find the alignment of this element. */ 5264 align2 = min_align (local->align, BITS_PER_UNIT * fieldpos); 5265 5266 /* Determine size this element should occupy. */ 5267 if (local->field) 5268 { 5269 fieldsize = 0; 5270 5271 /* If this is an array with an unspecified upper bound, 5272 the initializer determines the size. */ 5273 /* ??? This ought to only checked if DECL_SIZE_UNIT is NULL, 5274 but we cannot do this until the deprecated support for 5275 initializing zero-length array members is removed. */ 5276 if (TREE_CODE (TREE_TYPE (local->field)) == ARRAY_TYPE 5277 && (!TYPE_DOMAIN (TREE_TYPE (local->field)) 5278 || !TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (local->field))))) 5279 { 5280 unsigned HOST_WIDE_INT fldsize 5281 = array_size_for_constructor (local->val); 5282 fieldsize = int_size_in_bytes (TREE_TYPE (local->val)); 5283 /* In most cases fieldsize == fldsize as the size of the initializer 5284 determines how many elements the flexible array member has. For 5285 C++ fldsize can be smaller though, if the last or several last or 5286 all initializers of the flexible array member have side-effects 5287 and the FE splits them into dynamic initialization. */ 5288 gcc_checking_assert (fieldsize >= fldsize); 5289 /* Given a non-empty initialization, this field had better 5290 be last. Given a flexible array member, the next field 5291 on the chain is a TYPE_DECL of the enclosing struct. 
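   With `struct s { int n; int a[]; }' and a three-element initializer for the flexible member, for instance, both sizes come to 12 bytes on a target with 4-byte int, since the initializer rather than the field declaration determines how much data is emitted.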
*/ 5292 const_tree next = DECL_CHAIN (local->field); 5293 gcc_assert (!fieldsize || !next || TREE_CODE (next) != FIELD_DECL); 5294 } 5295 else 5296 fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field)); 5297 } 5298 else 5299 fieldsize = int_size_in_bytes (TREE_TYPE (local->type)); 5300 5301 /* Output the element's initial value. */ 5302 if (local->val == NULL_TREE) 5303 assemble_zeros (fieldsize); 5304 else 5305 fieldsize = output_constant (local->val, fieldsize, align2, 5306 local->reverse, false); 5307 5308 /* Count its size. */ 5309 local->total_bytes += fieldsize; 5310 } 5311 5312 /* Helper for output_constructor. From the LOCAL state, output an element 5313 that is a true bitfield or part of an outer one. BIT_OFFSET is the offset 5314 from the start of a possibly ongoing outer byte buffer. */ 5315 5316 static void 5317 output_constructor_bitfield (oc_local_state *local, unsigned int bit_offset) 5318 { 5319 /* Bit size of this element. */ 5320 HOST_WIDE_INT ebitsize 5321 = (local->field 5322 ? tree_to_uhwi (DECL_SIZE (local->field)) 5323 : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type)))); 5324 5325 /* Relative index of this element if this is an array component. */ 5326 HOST_WIDE_INT relative_index 5327 = (!local->field 5328 ? (local->index 5329 ? (tree_to_shwi (local->index) 5330 - tree_to_shwi (local->min_index)) 5331 : local->last_relative_index + 1) 5332 : 0); 5333 5334 /* Bit position of this element from the start of the containing 5335 constructor. */ 5336 HOST_WIDE_INT constructor_relative_ebitpos 5337 = (local->field 5338 ? int_bit_position (local->field) 5339 : ebitsize * relative_index); 5340 5341 /* Bit position of this element from the start of a possibly ongoing 5342 outer byte buffer. */ 5343 HOST_WIDE_INT byte_relative_ebitpos 5344 = bit_offset + constructor_relative_ebitpos; 5345 5346 /* From the start of a possibly ongoing outer byte buffer, offsets to 5347 the first bit of this element and to the first bit past the end of 5348 this element. */ 5349 HOST_WIDE_INT next_offset = byte_relative_ebitpos; 5350 HOST_WIDE_INT end_offset = byte_relative_ebitpos + ebitsize; 5351 5352 local->last_relative_index = relative_index; 5353 5354 if (local->val == NULL_TREE) 5355 local->val = integer_zero_node; 5356 5357 while (TREE_CODE (local->val) == VIEW_CONVERT_EXPR 5358 || TREE_CODE (local->val) == NON_LVALUE_EXPR) 5359 local->val = TREE_OPERAND (local->val, 0); 5360 5361 if (TREE_CODE (local->val) != INTEGER_CST 5362 && TREE_CODE (local->val) != CONSTRUCTOR) 5363 { 5364 error ("invalid initial value for member %qE", DECL_NAME (local->field)); 5365 return; 5366 } 5367 5368 /* If this field does not start in this (or next) byte, skip some bytes. */ 5369 if (next_offset / BITS_PER_UNIT != local->total_bytes) 5370 { 5371 /* Output remnant of any bit field in previous bytes. */ 5372 if (local->byte_buffer_in_use) 5373 { 5374 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1); 5375 local->total_bytes++; 5376 local->byte_buffer_in_use = false; 5377 } 5378 5379 /* If still not at proper byte, advance to there. */ 5380 if (next_offset / BITS_PER_UNIT != local->total_bytes) 5381 { 5382 gcc_assert (next_offset / BITS_PER_UNIT >= local->total_bytes); 5383 assemble_zeros (next_offset / BITS_PER_UNIT - local->total_bytes); 5384 local->total_bytes = next_offset / BITS_PER_UNIT; 5385 } 5386 } 5387 5388 /* Set up the buffer if necessary. 
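   The buffer accumulates bits from successive bit-fields until a whole byte can be emitted; on a typical little-endian target, `struct { unsigned a : 3, b : 6; } x = { 5, 9 };' comes out as the byte 0x4d, a zero byte holding the remaining bit of B, and whatever tail padding the type requires.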
*/ 5389 if (!local->byte_buffer_in_use) 5390 { 5391 local->byte = 0; 5392 if (ebitsize > 0) 5393 local->byte_buffer_in_use = true; 5394 } 5395 5396 /* If this is nested constructor, recurse passing the bit offset and the 5397 pending data, then retrieve the new pending data afterwards. */ 5398 if (TREE_CODE (local->val) == CONSTRUCTOR) 5399 { 5400 oc_outer_state temp_state; 5401 temp_state.bit_offset = next_offset % BITS_PER_UNIT; 5402 temp_state.byte = local->byte; 5403 local->total_bytes 5404 += output_constructor (local->val, 0, 0, local->reverse, &temp_state); 5405 local->byte = temp_state.byte; 5406 return; 5407 } 5408 5409 /* Otherwise, we must split the element into pieces that fall within 5410 separate bytes, and combine each byte with previous or following 5411 bit-fields. */ 5412 while (next_offset < end_offset) 5413 { 5414 int this_time; 5415 int shift; 5416 unsigned HOST_WIDE_INT value; 5417 HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT; 5418 HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT; 5419 5420 /* Advance from byte to byte within this element when necessary. */ 5421 while (next_byte != local->total_bytes) 5422 { 5423 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1); 5424 local->total_bytes++; 5425 local->byte = 0; 5426 } 5427 5428 /* Number of bits we can process at once (all part of the same byte). */ 5429 this_time = MIN (end_offset - next_offset, BITS_PER_UNIT - next_bit); 5430 if (local->reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN) 5431 { 5432 /* For big-endian data, take the most significant bits (of the 5433 bits that are significant) first and put them into bytes from 5434 the most significant end. */ 5435 shift = end_offset - next_offset - this_time; 5436 5437 /* Don't try to take a bunch of bits that cross 5438 the word boundary in the INTEGER_CST. We can 5439 only select bits from one element. */ 5440 if ((shift / HOST_BITS_PER_WIDE_INT) 5441 != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT)) 5442 { 5443 const int end = shift + this_time - 1; 5444 shift = end & -HOST_BITS_PER_WIDE_INT; 5445 this_time = end - shift + 1; 5446 } 5447 5448 /* Now get the bits we want to insert. */ 5449 value = wi::extract_uhwi (wi::to_widest (local->val), 5450 shift, this_time); 5451 5452 /* Get the result. This works only when: 5453 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */ 5454 local->byte |= value << (BITS_PER_UNIT - this_time - next_bit); 5455 } 5456 else 5457 { 5458 /* On little-endian machines, take the least significant bits of 5459 the value first and pack them starting at the least significant 5460 bits of the bytes. */ 5461 shift = next_offset - byte_relative_ebitpos; 5462 5463 /* Don't try to take a bunch of bits that cross 5464 the word boundary in the INTEGER_CST. We can 5465 only select bits from one element. */ 5466 if ((shift / HOST_BITS_PER_WIDE_INT) 5467 != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT)) 5468 this_time 5469 = HOST_BITS_PER_WIDE_INT - (shift & (HOST_BITS_PER_WIDE_INT - 1)); 5470 5471 /* Now get the bits we want to insert. */ 5472 value = wi::extract_uhwi (wi::to_widest (local->val), 5473 shift, this_time); 5474 5475 /* Get the result. This works only when: 5476 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */ 5477 local->byte |= value << next_bit; 5478 } 5479 5480 next_offset += this_time; 5481 local->byte_buffer_in_use = true; 5482 } 5483 } 5484 5485 /* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants). 5486 Generate at least SIZE bytes, padding if necessary. 
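   (A nested call made for an inner bit-field constructor passes a SIZE of zero; its partially filled byte is handed back to the caller rather than written out.)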
OUTER designates the 5487 caller output state of relevance in recursive invocations. */ 5488 5489 static unsigned HOST_WIDE_INT 5490 output_constructor (tree exp, unsigned HOST_WIDE_INT size, unsigned int align, 5491 bool reverse, oc_outer_state *outer) 5492 { 5493 unsigned HOST_WIDE_INT cnt; 5494 constructor_elt *ce; 5495 oc_local_state local; 5496 5497 /* Setup our local state to communicate with helpers. */ 5498 local.exp = exp; 5499 local.type = TREE_TYPE (exp); 5500 local.size = size; 5501 local.align = align; 5502 if (TREE_CODE (local.type) == ARRAY_TYPE && TYPE_DOMAIN (local.type)) 5503 local.min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (local.type)); 5504 else 5505 local.min_index = integer_zero_node; 5506 5507 local.total_bytes = 0; 5508 local.byte_buffer_in_use = outer != NULL; 5509 local.byte = outer ? outer->byte : 0; 5510 local.last_relative_index = -1; 5511 /* The storage order is specified for every aggregate type. */ 5512 if (AGGREGATE_TYPE_P (local.type)) 5513 local.reverse = TYPE_REVERSE_STORAGE_ORDER (local.type); 5514 else 5515 local.reverse = reverse; 5516 5517 gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT); 5518 5519 /* As CE goes through the elements of the constant, FIELD goes through the 5520 structure fields if the constant is a structure. If the constant is a 5521 union, we override this by getting the field from the TREE_LIST element. 5522 But the constant could also be an array. Then FIELD is zero. 5523 5524 There is always a maximum of one element in the chain LINK for unions 5525 (even if the initializer in a source program incorrectly contains 5526 more one). */ 5527 5528 if (TREE_CODE (local.type) == RECORD_TYPE) 5529 local.field = TYPE_FIELDS (local.type); 5530 else 5531 local.field = NULL_TREE; 5532 5533 for (cnt = 0; 5534 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), cnt, &ce); 5535 cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0) 5536 { 5537 local.val = ce->value; 5538 local.index = NULL_TREE; 5539 5540 /* The element in a union constructor specifies the proper field 5541 or index. */ 5542 if (RECORD_OR_UNION_TYPE_P (local.type) && ce->index != NULL_TREE) 5543 local.field = ce->index; 5544 5545 else if (TREE_CODE (local.type) == ARRAY_TYPE) 5546 local.index = ce->index; 5547 5548 if (local.field && flag_verbose_asm) 5549 fprintf (asm_out_file, "%s %s:\n", 5550 ASM_COMMENT_START, 5551 DECL_NAME (local.field) 5552 ? IDENTIFIER_POINTER (DECL_NAME (local.field)) 5553 : "<anonymous>"); 5554 5555 /* Eliminate the marker that makes a cast not be an lvalue. */ 5556 if (local.val != NULL_TREE) 5557 STRIP_NOPS (local.val); 5558 5559 /* Output the current element, using the appropriate helper ... */ 5560 5561 /* For an array slice not part of an outer bitfield. */ 5562 if (!outer 5563 && local.index != NULL_TREE 5564 && TREE_CODE (local.index) == RANGE_EXPR) 5565 output_constructor_array_range (&local); 5566 5567 /* For a field that is neither a true bitfield nor part of an outer one, 5568 known to be at least byte aligned and multiple-of-bytes long. */ 5569 else if (!outer 5570 && (local.field == NULL_TREE 5571 || !CONSTRUCTOR_BITFIELD_P (local.field))) 5572 output_constructor_regular_field (&local); 5573 5574 /* For a true bitfield or part of an outer one. Only INTEGER_CSTs are 5575 supported for scalar fields, so we may need to convert first. 
*/ 5576 else 5577 { 5578 if (TREE_CODE (local.val) == REAL_CST) 5579 local.val 5580 = fold_unary (VIEW_CONVERT_EXPR, 5581 build_nonstandard_integer_type 5582 (TYPE_PRECISION (TREE_TYPE (local.val)), 0), 5583 local.val); 5584 output_constructor_bitfield (&local, outer ? outer->bit_offset : 0); 5585 } 5586 } 5587 5588 /* If we are not at toplevel, save the pending data for our caller. 5589 Otherwise output the pending data and padding zeros as needed. */ 5590 if (outer) 5591 outer->byte = local.byte; 5592 else 5593 { 5594 if (local.byte_buffer_in_use) 5595 { 5596 assemble_integer (GEN_INT (local.byte), 1, BITS_PER_UNIT, 1); 5597 local.total_bytes++; 5598 } 5599 5600 if ((unsigned HOST_WIDE_INT)local.total_bytes < local.size) 5601 { 5602 assemble_zeros (local.size - local.total_bytes); 5603 local.total_bytes = local.size; 5604 } 5605 } 5606 5607 return local.total_bytes; 5608 } 5609 5610 /* Mark DECL as weak. */ 5611 5612 static void 5613 mark_weak (tree decl) 5614 { 5615 if (DECL_WEAK (decl)) 5616 return; 5617 5618 struct symtab_node *n = symtab_node::get (decl); 5619 if (n && n->refuse_visibility_changes) 5620 error ("%+qD declared weak after being used", decl); 5621 DECL_WEAK (decl) = 1; 5622 5623 if (DECL_RTL_SET_P (decl) 5624 && MEM_P (DECL_RTL (decl)) 5625 && XEXP (DECL_RTL (decl), 0) 5626 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == SYMBOL_REF) 5627 SYMBOL_REF_WEAK (XEXP (DECL_RTL (decl), 0)) = 1; 5628 } 5629 5630 /* Merge weak status between NEWDECL and OLDDECL. */ 5631 5632 void 5633 merge_weak (tree newdecl, tree olddecl) 5634 { 5635 if (DECL_WEAK (newdecl) == DECL_WEAK (olddecl)) 5636 { 5637 if (DECL_WEAK (newdecl) && TARGET_SUPPORTS_WEAK) 5638 { 5639 tree *pwd; 5640 /* We put the NEWDECL on the weak_decls list at some point 5641 and OLDDECL as well. Keep just OLDDECL on the list. */ 5642 for (pwd = &weak_decls; *pwd; pwd = &TREE_CHAIN (*pwd)) 5643 if (TREE_VALUE (*pwd) == newdecl) 5644 { 5645 *pwd = TREE_CHAIN (*pwd); 5646 break; 5647 } 5648 } 5649 return; 5650 } 5651 5652 if (DECL_WEAK (newdecl)) 5653 { 5654 tree wd; 5655 5656 /* NEWDECL is weak, but OLDDECL is not. */ 5657 5658 /* If we already output the OLDDECL, we're in trouble; we can't 5659 go back and make it weak. This should never happen in 5660 unit-at-a-time compilation. */ 5661 gcc_assert (!TREE_ASM_WRITTEN (olddecl)); 5662 5663 /* If we've already generated rtl referencing OLDDECL, we may 5664 have done so in a way that will not function properly with 5665 a weak symbol. Again in unit-at-a-time this should be 5666 impossible. */ 5667 gcc_assert (!TREE_USED (olddecl) 5668 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (olddecl))); 5669 5670 /* PR 49899: You cannot convert a static function into a weak, public function. */ 5671 if (! TREE_PUBLIC (olddecl) && TREE_PUBLIC (newdecl)) 5672 error ("weak declaration of %q+D being applied to a already " 5673 "existing, static definition", newdecl); 5674 5675 if (TARGET_SUPPORTS_WEAK) 5676 { 5677 /* We put the NEWDECL on the weak_decls list at some point. 5678 Replace it with the OLDDECL. */ 5679 for (wd = weak_decls; wd; wd = TREE_CHAIN (wd)) 5680 if (TREE_VALUE (wd) == newdecl) 5681 { 5682 TREE_VALUE (wd) = olddecl; 5683 break; 5684 } 5685 /* We may not find the entry on the list. If NEWDECL is a 5686 weak alias, then we will have already called 5687 globalize_decl to remove the entry; in that case, we do 5688 not need to do anything. */ 5689 } 5690 5691 /* Make the OLDDECL weak; it's OLDDECL that we'll be keeping. 
*/
5692 mark_weak (olddecl);
5693 }
5694 else
5695 /* OLDDECL was weak, but NEWDECL was not explicitly marked as
5696 weak. Just update NEWDECL to indicate that it's weak too. */
5697 mark_weak (newdecl);
5698 }
5699
5700 /* Declare DECL to be a weak symbol. */
5701
5702 void
5703 declare_weak (tree decl)
5704 {
5705 /* With -fsyntax-only, TREE_ASM_WRITTEN might be set on certain function
5706 decls earlier than normally, but as with -fsyntax-only nothing is really
5707 emitted, there is no harm in marking it weak later. */
5708 gcc_assert (TREE_CODE (decl) != FUNCTION_DECL
5709 || !TREE_ASM_WRITTEN (decl)
5710 || flag_syntax_only);
5711 if (! TREE_PUBLIC (decl))
5712 {
5713 error ("weak declaration of %q+D must be public", decl);
5714 return;
5715 }
5716 else if (!TARGET_SUPPORTS_WEAK)
5717 warning (0, "weak declaration of %q+D not supported", decl);
5718
5719 mark_weak (decl);
5720 if (!lookup_attribute ("weak", DECL_ATTRIBUTES (decl)))
5721 DECL_ATTRIBUTES (decl)
5722 = tree_cons (get_identifier ("weak"), NULL, DECL_ATTRIBUTES (decl));
5723 }
5724
5725 static void
5726 weak_finish_1 (tree decl)
5727 {
5728 #if defined (ASM_WEAKEN_DECL) || defined (ASM_WEAKEN_LABEL)
5729 const char *const name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5730 #endif
5731
5732 if (! TREE_USED (decl))
5733 return;
5734
5735 #ifdef ASM_WEAKEN_DECL
5736 ASM_WEAKEN_DECL (asm_out_file, decl, name, NULL);
5737 #else
5738 #ifdef ASM_WEAKEN_LABEL
5739 ASM_WEAKEN_LABEL (asm_out_file, name);
5740 #else
5741 #ifdef ASM_OUTPUT_WEAK_ALIAS
5742 {
5743 static bool warn_once = 0;
5744 if (! warn_once)
5745 {
5746 warning (0, "only weak aliases are supported in this configuration");
5747 warn_once = 1;
5748 }
5749 return;
5750 }
5751 #endif
5752 #endif
5753 #endif
5754 }
5755
5756 /* Given an assembly name, find the decl it is associated with. */
5757 static tree
5758 find_decl (tree target)
5759 {
5760 symtab_node *node = symtab_node::get_for_asmname (target);
5761 if (node)
5762 return node->decl;
5763 return NULL_TREE;
5764 }
5765
5766 /* This TREE_LIST contains weakref targets. */
5767
5768 static GTY(()) tree weakref_targets;
5769
5770 /* Emit any pending weak declarations. */
5771
5772 void
5773 weak_finish (void)
5774 {
5775 tree t;
5776
5777 for (t = weakref_targets; t; t = TREE_CHAIN (t))
5778 {
5779 tree alias_decl = TREE_PURPOSE (t);
5780 tree target = ultimate_transparent_alias_target (&TREE_VALUE (t));
5781
5782 if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl))
5783 || TREE_SYMBOL_REFERENCED (target))
5784 /* Remove alias_decl from the weak list, but leave entries for
5785 the target alone. */
5786 target = NULL_TREE;
5787 #ifndef ASM_OUTPUT_WEAKREF
5788 else if (! TREE_SYMBOL_REFERENCED (target))
5789 {
5790 /* Use ASM_WEAKEN_LABEL only if ASM_WEAKEN_DECL is not
5791 defined, otherwise we and weak_finish_1 would use
5792 different macros. */
5793 # if defined ASM_WEAKEN_LABEL && ! defined ASM_WEAKEN_DECL
5794 ASM_WEAKEN_LABEL (asm_out_file, IDENTIFIER_POINTER (target));
5795 # else
5796 tree decl = find_decl (target);
5797
5798 if (!
decl) 5799 { 5800 decl = build_decl (DECL_SOURCE_LOCATION (alias_decl), 5801 TREE_CODE (alias_decl), target, 5802 TREE_TYPE (alias_decl)); 5803 5804 DECL_EXTERNAL (decl) = 1; 5805 TREE_PUBLIC (decl) = 1; 5806 DECL_ARTIFICIAL (decl) = 1; 5807 TREE_NOTHROW (decl) = TREE_NOTHROW (alias_decl); 5808 TREE_USED (decl) = 1; 5809 } 5810 5811 weak_finish_1 (decl); 5812 # endif 5813 } 5814 #endif 5815 5816 { 5817 tree *p; 5818 tree t2; 5819 5820 /* Remove the alias and the target from the pending weak list 5821 so that we do not emit any .weak directives for the former, 5822 nor multiple .weak directives for the latter. */ 5823 for (p = &weak_decls; (t2 = *p) ; ) 5824 { 5825 if (TREE_VALUE (t2) == alias_decl 5826 || target == DECL_ASSEMBLER_NAME (TREE_VALUE (t2))) 5827 *p = TREE_CHAIN (t2); 5828 else 5829 p = &TREE_CHAIN (t2); 5830 } 5831 5832 /* Remove other weakrefs to the same target, to speed things up. */ 5833 for (p = &TREE_CHAIN (t); (t2 = *p) ; ) 5834 { 5835 if (target == ultimate_transparent_alias_target (&TREE_VALUE (t2))) 5836 *p = TREE_CHAIN (t2); 5837 else 5838 p = &TREE_CHAIN (t2); 5839 } 5840 } 5841 } 5842 5843 for (t = weak_decls; t; t = TREE_CHAIN (t)) 5844 { 5845 tree decl = TREE_VALUE (t); 5846 5847 weak_finish_1 (decl); 5848 } 5849 } 5850 5851 /* Emit the assembly bits to indicate that DECL is globally visible. */ 5852 5853 static void 5854 globalize_decl (tree decl) 5855 { 5856 5857 #if defined (ASM_WEAKEN_LABEL) || defined (ASM_WEAKEN_DECL) 5858 if (DECL_WEAK (decl)) 5859 { 5860 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0); 5861 tree *p, t; 5862 5863 #ifdef ASM_WEAKEN_DECL 5864 ASM_WEAKEN_DECL (asm_out_file, decl, name, 0); 5865 #else 5866 ASM_WEAKEN_LABEL (asm_out_file, name); 5867 #endif 5868 5869 /* Remove this function from the pending weak list so that 5870 we do not emit multiple .weak directives for it. */ 5871 for (p = &weak_decls; (t = *p) ; ) 5872 { 5873 if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t))) 5874 *p = TREE_CHAIN (t); 5875 else 5876 p = &TREE_CHAIN (t); 5877 } 5878 5879 /* Remove weakrefs to the same target from the pending weakref 5880 list, for the same reason. */ 5881 for (p = &weakref_targets; (t = *p) ; ) 5882 { 5883 if (DECL_ASSEMBLER_NAME (decl) 5884 == ultimate_transparent_alias_target (&TREE_VALUE (t))) 5885 *p = TREE_CHAIN (t); 5886 else 5887 p = &TREE_CHAIN (t); 5888 } 5889 5890 return; 5891 } 5892 #endif 5893 5894 targetm.asm_out.globalize_decl_name (asm_out_file, decl); 5895 } 5896 5897 vec<alias_pair, va_gc> *alias_pairs; 5898 5899 /* Output the assembler code for a define (equate) using ASM_OUTPUT_DEF 5900 or ASM_OUTPUT_DEF_FROM_DECLS. The function defines the symbol whose 5901 tree node is DECL to have the value of the tree node TARGET. */ 5902 5903 void 5904 do_assemble_alias (tree decl, tree target) 5905 { 5906 tree id; 5907 5908 /* Emulated TLS had better not get this var. */ 5909 gcc_assert (!(!targetm.have_tls 5910 && VAR_P (decl) 5911 && DECL_THREAD_LOCAL_P (decl))); 5912 5913 if (TREE_ASM_WRITTEN (decl)) 5914 return; 5915 5916 id = DECL_ASSEMBLER_NAME (decl); 5917 ultimate_transparent_alias_target (&id); 5918 ultimate_transparent_alias_target (&target); 5919 5920 /* We must force creation of DECL_RTL for debug info generation, even though 5921 we don't use it here. 
*/ 5922 make_decl_rtl (decl); 5923 5924 TREE_ASM_WRITTEN (decl) = 1; 5925 TREE_ASM_WRITTEN (DECL_ASSEMBLER_NAME (decl)) = 1; 5926 TREE_ASM_WRITTEN (id) = 1; 5927 5928 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))) 5929 { 5930 if (!TREE_SYMBOL_REFERENCED (target)) 5931 weakref_targets = tree_cons (decl, target, weakref_targets); 5932 5933 #ifdef ASM_OUTPUT_WEAKREF 5934 ASM_OUTPUT_WEAKREF (asm_out_file, decl, 5935 IDENTIFIER_POINTER (id), 5936 IDENTIFIER_POINTER (target)); 5937 #else 5938 if (!TARGET_SUPPORTS_WEAK) 5939 { 5940 error_at (DECL_SOURCE_LOCATION (decl), 5941 "weakref is not supported in this configuration"); 5942 return; 5943 } 5944 #endif 5945 return; 5946 } 5947 5948 #ifdef ASM_OUTPUT_DEF 5949 tree orig_decl = decl; 5950 5951 /* Make name accessible from other files, if appropriate. */ 5952 5953 if (TREE_PUBLIC (decl) || TREE_PUBLIC (orig_decl)) 5954 { 5955 globalize_decl (decl); 5956 maybe_assemble_visibility (decl); 5957 } 5958 if (TREE_CODE (decl) == FUNCTION_DECL 5959 && cgraph_node::get (decl)->ifunc_resolver) 5960 { 5961 #if defined (ASM_OUTPUT_TYPE_DIRECTIVE) 5962 if (targetm.has_ifunc_p ()) 5963 ASM_OUTPUT_TYPE_DIRECTIVE 5964 (asm_out_file, IDENTIFIER_POINTER (id), 5965 IFUNC_ASM_TYPE); 5966 else 5967 #endif 5968 error_at (DECL_SOURCE_LOCATION (decl), 5969 "%qs is not supported on this target", "ifunc"); 5970 } 5971 5972 # ifdef ASM_OUTPUT_DEF_FROM_DECLS 5973 ASM_OUTPUT_DEF_FROM_DECLS (asm_out_file, decl, target); 5974 # else 5975 ASM_OUTPUT_DEF (asm_out_file, 5976 IDENTIFIER_POINTER (id), 5977 IDENTIFIER_POINTER (target)); 5978 # endif 5979 #elif defined (ASM_OUTPUT_WEAK_ALIAS) || defined (ASM_WEAKEN_DECL) 5980 { 5981 const char *name; 5982 tree *p, t; 5983 5984 name = IDENTIFIER_POINTER (id); 5985 # ifdef ASM_WEAKEN_DECL 5986 ASM_WEAKEN_DECL (asm_out_file, decl, name, IDENTIFIER_POINTER (target)); 5987 # else 5988 ASM_OUTPUT_WEAK_ALIAS (asm_out_file, name, IDENTIFIER_POINTER (target)); 5989 # endif 5990 /* Remove this function from the pending weak list so that 5991 we do not emit multiple .weak directives for it. */ 5992 for (p = &weak_decls; (t = *p) ; ) 5993 if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)) 5994 || id == DECL_ASSEMBLER_NAME (TREE_VALUE (t))) 5995 *p = TREE_CHAIN (t); 5996 else 5997 p = &TREE_CHAIN (t); 5998 5999 /* Remove weakrefs to the same target from the pending weakref 6000 list, for the same reason. */ 6001 for (p = &weakref_targets; (t = *p) ; ) 6002 { 6003 if (id == ultimate_transparent_alias_target (&TREE_VALUE (t))) 6004 *p = TREE_CHAIN (t); 6005 else 6006 p = &TREE_CHAIN (t); 6007 } 6008 } 6009 #endif 6010 } 6011 6012 /* Output .symver directive. */ 6013 6014 void 6015 do_assemble_symver (tree decl, tree target) 6016 { 6017 tree id = DECL_ASSEMBLER_NAME (decl); 6018 ultimate_transparent_alias_target (&id); 6019 ultimate_transparent_alias_target (&target); 6020 #ifdef ASM_OUTPUT_SYMVER_DIRECTIVE 6021 ASM_OUTPUT_SYMVER_DIRECTIVE (asm_out_file, 6022 IDENTIFIER_POINTER (target), 6023 IDENTIFIER_POINTER (id)); 6024 #else 6025 error ("symver is only supported on ELF platforms"); 6026 #endif 6027 } 6028 6029 /* Emit an assembler directive to make the symbol for DECL an alias to 6030 the symbol for TARGET. 
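   For instance, with the usual ELF/GAS definition of ASM_OUTPUT_DEF, a
   source-level alias such as
     void f (void) __attribute__ ((alias ("__f")));
   is expected to come out roughly as
     .globl  f
     .set    f, __f
   in the assembly output (the exact directives are target-specific; this
   is only an illustrative sketch).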
*/ 6031 6032 void 6033 assemble_alias (tree decl, tree target) 6034 { 6035 tree target_decl; 6036 6037 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))) 6038 { 6039 tree alias = DECL_ASSEMBLER_NAME (decl); 6040 6041 ultimate_transparent_alias_target (&target); 6042 6043 if (alias == target) 6044 error ("%qs symbol %q+D ultimately targets itself", "weakref", decl); 6045 if (TREE_PUBLIC (decl)) 6046 error ("%qs symbol %q+D must have static linkage", "weakref", decl); 6047 } 6048 else 6049 { 6050 #if !defined (ASM_OUTPUT_DEF) 6051 # if !defined(ASM_OUTPUT_WEAK_ALIAS) && !defined (ASM_WEAKEN_DECL) 6052 error_at (DECL_SOURCE_LOCATION (decl), 6053 "alias definitions not supported in this configuration"); 6054 TREE_ASM_WRITTEN (decl) = 1; 6055 return; 6056 # else 6057 if (!DECL_WEAK (decl)) 6058 { 6059 /* NB: ifunc_resolver isn't set when an error is detected. */ 6060 if (TREE_CODE (decl) == FUNCTION_DECL 6061 && lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl))) 6062 error_at (DECL_SOURCE_LOCATION (decl), 6063 "%qs is not supported in this configuration", "ifunc"); 6064 else 6065 error_at (DECL_SOURCE_LOCATION (decl), 6066 "only weak aliases are supported in this configuration"); 6067 TREE_ASM_WRITTEN (decl) = 1; 6068 return; 6069 } 6070 # endif 6071 #endif 6072 } 6073 TREE_USED (decl) = 1; 6074 6075 /* Allow aliases to aliases. */ 6076 if (TREE_CODE (decl) == FUNCTION_DECL) 6077 cgraph_node::get_create (decl)->alias = true; 6078 else 6079 varpool_node::get_create (decl)->alias = true; 6080 6081 /* If the target has already been emitted, we don't have to queue the 6082 alias. This saves a tad of memory. */ 6083 if (symtab->global_info_ready) 6084 target_decl = find_decl (target); 6085 else 6086 target_decl= NULL; 6087 if ((target_decl && TREE_ASM_WRITTEN (target_decl)) 6088 || symtab->state >= EXPANSION) 6089 do_assemble_alias (decl, target); 6090 else 6091 { 6092 alias_pair p = {decl, target}; 6093 vec_safe_push (alias_pairs, p); 6094 } 6095 } 6096 6097 /* Record and output a table of translations from original function 6098 to its transaction aware clone. Note that tm_pure functions are 6099 considered to be their own clone. */ 6100 6101 struct tm_clone_hasher : ggc_cache_ptr_hash<tree_map> 6102 { 6103 static hashval_t hash (tree_map *m) { return tree_map_hash (m); } 6104 static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); } 6105 6106 static int 6107 keep_cache_entry (tree_map *&e) 6108 { 6109 return ggc_marked_p (e->base.from); 6110 } 6111 }; 6112 6113 static GTY((cache)) hash_table<tm_clone_hasher> *tm_clone_hash; 6114 6115 void 6116 record_tm_clone_pair (tree o, tree n) 6117 { 6118 struct tree_map **slot, *h; 6119 6120 if (tm_clone_hash == NULL) 6121 tm_clone_hash = hash_table<tm_clone_hasher>::create_ggc (32); 6122 6123 h = ggc_alloc<tree_map> (); 6124 h->hash = htab_hash_pointer (o); 6125 h->base.from = o; 6126 h->to = n; 6127 6128 slot = tm_clone_hash->find_slot_with_hash (h, h->hash, INSERT); 6129 *slot = h; 6130 } 6131 6132 tree 6133 get_tm_clone_pair (tree o) 6134 { 6135 if (tm_clone_hash) 6136 { 6137 struct tree_map *h, in; 6138 6139 in.base.from = o; 6140 in.hash = htab_hash_pointer (o); 6141 h = tm_clone_hash->find_with_hash (&in, in.hash); 6142 if (h) 6143 return h->to; 6144 } 6145 return NULL_TREE; 6146 } 6147 6148 struct tm_alias_pair 6149 { 6150 unsigned int uid; 6151 tree from; 6152 tree to; 6153 }; 6154 6155 6156 /* Dump the actual pairs to the .tm_clone_table section. 
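   Each surviving pair is emitted as two consecutive pointer-sized
   entries: the address of the original function followed by the address
   of its transactional clone. The intent (a behavioural note, not
   something enforced here) is that the TM runtime, libitm, scans this
   table at run time to map a function onto its clone.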
*/
6157
6158 static void
6159 dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
6160 {
6161 unsigned i;
6162 tm_alias_pair *p;
6163 bool switched = false;
6164
6165 FOR_EACH_VEC_ELT (tm_alias_pairs, i, p)
6166 {
6167 tree src = p->from;
6168 tree dst = p->to;
6169 struct cgraph_node *src_n = cgraph_node::get (src);
6170 struct cgraph_node *dst_n = cgraph_node::get (dst);
6171
6172 /* The function ipa_tm_create_version() marks the clone as needed if
6173 the original function was needed. But we also mark the clone as
6174 needed if we ever called the clone indirectly through
6175 TM_GETTMCLONE. If neither of these are true, we didn't generate
6176 a clone, and we didn't call it indirectly... no sense keeping it
6177 in the clone table. */
6178 if (!dst_n || !dst_n->definition)
6179 continue;
6180
6181 /* This covers the case where we have optimized the original
6182 function away, and only access the transactional clone. */
6183 if (!src_n || !src_n->definition)
6184 continue;
6185
6186 if (!switched)
6187 {
6188 switch_to_section (targetm.asm_out.tm_clone_table_section ());
6189 assemble_align (POINTER_SIZE);
6190 switched = true;
6191 }
6192
6193 assemble_integer (XEXP (DECL_RTL (src), 0),
6194 POINTER_SIZE_UNITS, POINTER_SIZE, 1);
6195 assemble_integer (XEXP (DECL_RTL (dst), 0),
6196 POINTER_SIZE_UNITS, POINTER_SIZE, 1);
6197 }
6198 }
6199
6200 /* Provide a default for the tm_clone_table section. */
6201
6202 section *
6203 default_clone_table_section (void)
6204 {
6205 return get_named_section (NULL, ".tm_clone_table", 3);
6206 }
6207
6208 /* Helper comparison function for qsorting by the DECL_UID stored in
6209 tm_alias_pair->uid. */
6210
6211 static int
6212 tm_alias_pair_cmp (const void *x, const void *y)
6213 {
6214 const tm_alias_pair *p1 = (const tm_alias_pair *) x;
6215 const tm_alias_pair *p2 = (const tm_alias_pair *) y;
6216 if (p1->uid < p2->uid)
6217 return -1;
6218 if (p1->uid > p2->uid)
6219 return 1;
6220 return 0;
6221 }
6222
6223 void
6224 finish_tm_clone_pairs (void)
6225 {
6226 vec<tm_alias_pair> tm_alias_pairs = vNULL;
6227
6228 if (tm_clone_hash == NULL)
6229 return;
6230
6231 /* We need a deterministic order for the .tm_clone_table, otherwise
6232 we will get bootstrap comparison failures, so dump the hash table
6233 to a vector, sort it, and dump the vector. */
6234
6235 /* Dump the hashtable to a vector. */
6236 tree_map *map;
6237 hash_table<tm_clone_hasher>::iterator iter;
6238 FOR_EACH_HASH_TABLE_ELEMENT (*tm_clone_hash, map, tree_map *, iter)
6239 {
6240 tm_alias_pair p = {DECL_UID (map->base.from), map->base.from, map->to};
6241 tm_alias_pairs.safe_push (p);
6242 }
6243 /* Sort it. */
6244 tm_alias_pairs.qsort (tm_alias_pair_cmp);
6245
6246 /* Dump it. */
6247 dump_tm_clone_pairs (tm_alias_pairs);
6248
6249 tm_clone_hash->empty ();
6250 tm_clone_hash = NULL;
6251 tm_alias_pairs.release ();
6252 }
6253
6254
6255 /* Emit an assembler directive to set the visibility of the symbol for
6256 DECL to the visibility type VIS, which must not be VISIBILITY_DEFAULT.
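   On GAS targets this amounts to a single directive naming the symbol,
   e.g.
     .hidden  foo
   for VISIBILITY_HIDDEN, and likewise ".protected" or ".internal" for
   the other kinds; when HAVE_GAS_HIDDEN is not defined the attribute is
   only diagnosed as unsupported and otherwise ignored.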
*/ 6257 6258 void 6259 default_assemble_visibility (tree decl ATTRIBUTE_UNUSED, 6260 int vis ATTRIBUTE_UNUSED) 6261 { 6262 #ifdef HAVE_GAS_HIDDEN 6263 static const char * const visibility_types[] = { 6264 NULL, "protected", "hidden", "internal" 6265 }; 6266 6267 const char *name, *type; 6268 tree id; 6269 6270 id = DECL_ASSEMBLER_NAME (decl); 6271 ultimate_transparent_alias_target (&id); 6272 name = IDENTIFIER_POINTER (id); 6273 6274 type = visibility_types[vis]; 6275 6276 fprintf (asm_out_file, "\t.%s\t", type); 6277 assemble_name (asm_out_file, name); 6278 fprintf (asm_out_file, "\n"); 6279 #else 6280 if (!DECL_ARTIFICIAL (decl)) 6281 warning (OPT_Wattributes, "visibility attribute not supported " 6282 "in this configuration; ignored"); 6283 #endif 6284 } 6285 6286 /* A helper function to call assemble_visibility when needed for a decl. */ 6287 6288 int 6289 maybe_assemble_visibility (tree decl) 6290 { 6291 enum symbol_visibility vis = DECL_VISIBILITY (decl); 6292 if (vis != VISIBILITY_DEFAULT) 6293 { 6294 targetm.asm_out.assemble_visibility (decl, vis); 6295 return 1; 6296 } 6297 else 6298 return 0; 6299 } 6300 6301 /* Returns 1 if the target configuration supports defining public symbols 6302 so that one of them will be chosen at link time instead of generating a 6303 multiply-defined symbol error, whether through the use of weak symbols or 6304 a target-specific mechanism for having duplicates discarded. */ 6305 6306 int 6307 supports_one_only (void) 6308 { 6309 if (SUPPORTS_ONE_ONLY) 6310 return 1; 6311 return TARGET_SUPPORTS_WEAK; 6312 } 6313 6314 /* Set up DECL as a public symbol that can be defined in multiple 6315 translation units without generating a linker error. */ 6316 6317 void 6318 make_decl_one_only (tree decl, tree comdat_group) 6319 { 6320 struct symtab_node *symbol; 6321 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl)); 6322 6323 TREE_PUBLIC (decl) = 1; 6324 6325 if (VAR_P (decl)) 6326 symbol = varpool_node::get_create (decl); 6327 else 6328 symbol = cgraph_node::get_create (decl); 6329 6330 if (SUPPORTS_ONE_ONLY) 6331 { 6332 #ifdef MAKE_DECL_ONE_ONLY 6333 MAKE_DECL_ONE_ONLY (decl); 6334 #endif 6335 symbol->set_comdat_group (comdat_group); 6336 } 6337 else if (VAR_P (decl) 6338 && (DECL_INITIAL (decl) == 0 6339 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node))) 6340 DECL_COMMON (decl) = 1; 6341 else 6342 { 6343 gcc_assert (TARGET_SUPPORTS_WEAK); 6344 DECL_WEAK (decl) = 1; 6345 } 6346 } 6347 6348 void 6349 init_varasm_once (void) 6350 { 6351 section_htab = hash_table<section_hasher>::create_ggc (31); 6352 object_block_htab = hash_table<object_block_hasher>::create_ggc (31); 6353 const_desc_htab = hash_table<tree_descriptor_hasher>::create_ggc (1009); 6354 6355 shared_constant_pool = create_constant_pool (); 6356 6357 #ifdef TEXT_SECTION_ASM_OP 6358 text_section = get_unnamed_section (SECTION_CODE, output_section_asm_op, 6359 TEXT_SECTION_ASM_OP); 6360 #endif 6361 6362 #ifdef DATA_SECTION_ASM_OP 6363 data_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op, 6364 DATA_SECTION_ASM_OP); 6365 #endif 6366 6367 #ifdef SDATA_SECTION_ASM_OP 6368 sdata_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op, 6369 SDATA_SECTION_ASM_OP); 6370 #endif 6371 6372 #ifdef READONLY_DATA_SECTION_ASM_OP 6373 readonly_data_section = get_unnamed_section (0, output_section_asm_op, 6374 READONLY_DATA_SECTION_ASM_OP); 6375 #endif 6376 6377 #ifdef CTORS_SECTION_ASM_OP 6378 ctors_section = get_unnamed_section (0, output_section_asm_op, 6379 CTORS_SECTION_ASM_OP); 6380 
#endif 6381 6382 #ifdef DTORS_SECTION_ASM_OP 6383 dtors_section = get_unnamed_section (0, output_section_asm_op, 6384 DTORS_SECTION_ASM_OP); 6385 #endif 6386 6387 #ifdef BSS_SECTION_ASM_OP 6388 bss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS, 6389 output_section_asm_op, 6390 BSS_SECTION_ASM_OP); 6391 #endif 6392 6393 #ifdef SBSS_SECTION_ASM_OP 6394 sbss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS, 6395 output_section_asm_op, 6396 SBSS_SECTION_ASM_OP); 6397 #endif 6398 6399 tls_comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS 6400 | SECTION_COMMON, emit_tls_common); 6401 lcomm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS 6402 | SECTION_COMMON, emit_local); 6403 comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS 6404 | SECTION_COMMON, emit_common); 6405 6406 #if defined ASM_OUTPUT_ALIGNED_BSS 6407 bss_noswitch_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS, 6408 emit_bss); 6409 #endif 6410 6411 targetm.asm_out.init_sections (); 6412 6413 if (readonly_data_section == NULL) 6414 readonly_data_section = text_section; 6415 6416 #ifdef ASM_OUTPUT_EXTERNAL 6417 pending_assemble_externals_set = new hash_set<tree>; 6418 #endif 6419 } 6420 6421 enum tls_model 6422 decl_default_tls_model (const_tree decl) 6423 { 6424 enum tls_model kind; 6425 bool is_local; 6426 6427 is_local = targetm.binds_local_p (decl); 6428 if (!flag_shlib) 6429 { 6430 if (is_local) 6431 kind = TLS_MODEL_LOCAL_EXEC; 6432 else 6433 kind = TLS_MODEL_INITIAL_EXEC; 6434 } 6435 6436 /* Local dynamic is inefficient when we're not combining the 6437 parts of the address. */ 6438 else if (optimize && is_local) 6439 kind = TLS_MODEL_LOCAL_DYNAMIC; 6440 else 6441 kind = TLS_MODEL_GLOBAL_DYNAMIC; 6442 if (kind < flag_tls_default) 6443 kind = flag_tls_default; 6444 6445 return kind; 6446 } 6447 6448 /* Select a set of attributes for section NAME based on the properties 6449 of DECL and whether or not RELOC indicates that DECL's initializer 6450 might contain runtime relocations. 6451 6452 We make the section read-only and executable for a function decl, 6453 read-only for a const data decl, and writable for a non-const data decl. 
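   For example, under the rules below a section named ".tbss" is given
   SECTION_WRITE | SECTION_TLS | SECTION_BSS, and a plain ".bss" section
   SECTION_WRITE | SECTION_BSS.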
*/ 6454 6455 unsigned int 6456 default_section_type_flags (tree decl, const char *name, int reloc) 6457 { 6458 unsigned int flags; 6459 6460 if (decl && TREE_CODE (decl) == FUNCTION_DECL) 6461 flags = SECTION_CODE; 6462 else if (decl) 6463 { 6464 enum section_category category 6465 = categorize_decl_for_section (decl, reloc); 6466 if (decl_readonly_section_1 (category)) 6467 flags = 0; 6468 else if (category == SECCAT_DATA_REL_RO 6469 || category == SECCAT_DATA_REL_RO_LOCAL) 6470 flags = SECTION_WRITE | SECTION_RELRO; 6471 else 6472 flags = SECTION_WRITE; 6473 } 6474 else 6475 { 6476 flags = SECTION_WRITE; 6477 if (strcmp (name, ".data.rel.ro") == 0 6478 || strcmp (name, ".data.rel.ro.local") == 0) 6479 flags |= SECTION_RELRO; 6480 } 6481 6482 if (decl && DECL_P (decl) && DECL_COMDAT_GROUP (decl)) 6483 flags |= SECTION_LINKONCE; 6484 6485 if (strcmp (name, ".vtable_map_vars") == 0) 6486 flags |= SECTION_LINKONCE; 6487 6488 if (decl && VAR_P (decl) && DECL_THREAD_LOCAL_P (decl)) 6489 flags |= SECTION_TLS | SECTION_WRITE; 6490 6491 if (strcmp (name, ".bss") == 0 6492 || strncmp (name, ".bss.", 5) == 0 6493 || strncmp (name, ".gnu.linkonce.b.", 16) == 0 6494 || strcmp (name, ".persistent.bss") == 0 6495 || strcmp (name, ".sbss") == 0 6496 || strncmp (name, ".sbss.", 6) == 0 6497 || strncmp (name, ".gnu.linkonce.sb.", 17) == 0) 6498 flags |= SECTION_BSS; 6499 6500 if (strcmp (name, ".tdata") == 0 6501 || strncmp (name, ".tdata.", 7) == 0 6502 || strncmp (name, ".gnu.linkonce.td.", 17) == 0) 6503 flags |= SECTION_TLS; 6504 6505 if (strcmp (name, ".tbss") == 0 6506 || strncmp (name, ".tbss.", 6) == 0 6507 || strncmp (name, ".gnu.linkonce.tb.", 17) == 0) 6508 flags |= SECTION_TLS | SECTION_BSS; 6509 6510 if (strcmp (name, ".noinit") == 0) 6511 flags |= SECTION_WRITE | SECTION_BSS | SECTION_NOTYPE; 6512 6513 /* Various sections have special ELF types that the assembler will 6514 assign by default based on the name. They are neither SHT_PROGBITS 6515 nor SHT_NOBITS, so when changing sections we don't want to print a 6516 section type (@progbits or @nobits). Rather than duplicating the 6517 assembler's knowledge of what those special name patterns are, just 6518 let the assembler choose the type if we don't know a specific 6519 reason to set it to something other than the default. SHT_PROGBITS 6520 is the default for sections whose name is not specially known to 6521 the assembler, so it does no harm to leave the choice to the 6522 assembler when @progbits is the best thing we know to use. If 6523 someone is silly enough to emit code or TLS variables to one of 6524 these sections, then don't handle them specially. 6525 6526 default_elf_asm_named_section (below) handles the BSS, TLS, ENTSIZE, and 6527 LINKONCE cases when NOTYPE is not set, so leave those to its logic. */ 6528 if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE)) 6529 && !(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))) 6530 flags |= SECTION_NOTYPE; 6531 6532 return flags; 6533 } 6534 6535 /* Return true if the target supports some form of global BSS, 6536 either through bss_noswitch_section, or by selecting a BSS 6537 section in TARGET_ASM_SELECT_SECTION. */ 6538 6539 bool 6540 have_global_bss_p (void) 6541 { 6542 return bss_noswitch_section || targetm.have_switchable_bss_sections; 6543 } 6544 6545 /* Output assembly to switch to section NAME with attribute FLAGS. 6546 Four variants for common object file formats. 
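   As an illustration, the ELF variant below typically emits lines such as
     .section  .data.rel.ro,"aw"
   or, for a mergeable string section,
     .section  .rodata.str1.1,"aMS",@progbits,1
   while the COFF and PE variants emit their own, simpler forms; the
   exact flag letters depend on the section attributes computed above.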
*/ 6547 6548 void 6549 default_no_named_section (const char *name ATTRIBUTE_UNUSED, 6550 unsigned int flags ATTRIBUTE_UNUSED, 6551 tree decl ATTRIBUTE_UNUSED) 6552 { 6553 /* Some object formats don't support named sections at all. The 6554 front-end should already have flagged this as an error. */ 6555 gcc_unreachable (); 6556 } 6557 6558 #ifndef TLS_SECTION_ASM_FLAG 6559 #define TLS_SECTION_ASM_FLAG 'T' 6560 #endif 6561 6562 void 6563 default_elf_asm_named_section (const char *name, unsigned int flags, 6564 tree decl) 6565 { 6566 char flagchars[11], *f = flagchars; 6567 unsigned int numeric_value = 0; 6568 6569 /* If we have already declared this section, we can use an 6570 abbreviated form to switch back to it -- unless this section is 6571 part of a COMDAT groups, in which case GAS requires the full 6572 declaration every time. */ 6573 if (!(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)) 6574 && (flags & SECTION_DECLARED)) 6575 { 6576 fprintf (asm_out_file, "\t.section\t%s\n", name); 6577 return; 6578 } 6579 6580 /* If we have a machine specific flag, then use the numeric value to pass 6581 this on to GAS. */ 6582 if (targetm.asm_out.elf_flags_numeric (flags, &numeric_value)) 6583 snprintf (f, sizeof (flagchars), "0x%08x", numeric_value); 6584 else 6585 { 6586 if (!(flags & SECTION_DEBUG)) 6587 *f++ = 'a'; 6588 #if HAVE_GAS_SECTION_EXCLUDE 6589 if (flags & SECTION_EXCLUDE) 6590 *f++ = 'e'; 6591 #endif 6592 if (flags & SECTION_WRITE) 6593 *f++ = 'w'; 6594 if (flags & SECTION_CODE) 6595 *f++ = 'x'; 6596 if (flags & SECTION_SMALL) 6597 *f++ = 's'; 6598 if (flags & SECTION_MERGE) 6599 *f++ = 'M'; 6600 if (flags & SECTION_STRINGS) 6601 *f++ = 'S'; 6602 if (flags & SECTION_TLS) 6603 *f++ = TLS_SECTION_ASM_FLAG; 6604 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)) 6605 *f++ = 'G'; 6606 #ifdef MACH_DEP_SECTION_ASM_FLAG 6607 if (flags & SECTION_MACH_DEP) 6608 *f++ = MACH_DEP_SECTION_ASM_FLAG; 6609 #endif 6610 *f = '\0'; 6611 } 6612 6613 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars); 6614 6615 /* default_section_type_flags (above) knows which flags need special 6616 handling here, and sets NOTYPE when none of these apply so that the 6617 assembler's logic for default types can apply to user-chosen 6618 section names. */ 6619 if (!(flags & SECTION_NOTYPE)) 6620 { 6621 const char *type; 6622 const char *format; 6623 6624 if (flags & SECTION_BSS) 6625 type = "nobits"; 6626 else 6627 type = "progbits"; 6628 6629 format = ",@%s"; 6630 /* On platforms that use "@" as the assembly comment character, 6631 use "%" instead. 
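   (ARM is the usual example of such a target: there the section type is
   printed as ",%progbits" rather than ",@progbits".)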
*/ 6632 if (strcmp (ASM_COMMENT_START, "@") == 0) 6633 format = ",%%%s"; 6634 fprintf (asm_out_file, format, type); 6635 6636 if (flags & SECTION_ENTSIZE) 6637 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE); 6638 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)) 6639 { 6640 if (TREE_CODE (decl) == IDENTIFIER_NODE) 6641 fprintf (asm_out_file, ",%s,comdat", IDENTIFIER_POINTER (decl)); 6642 else 6643 fprintf (asm_out_file, ",%s,comdat", 6644 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl))); 6645 } 6646 } 6647 6648 putc ('\n', asm_out_file); 6649 } 6650 6651 void 6652 default_coff_asm_named_section (const char *name, unsigned int flags, 6653 tree decl ATTRIBUTE_UNUSED) 6654 { 6655 char flagchars[8], *f = flagchars; 6656 6657 if (flags & SECTION_WRITE) 6658 *f++ = 'w'; 6659 if (flags & SECTION_CODE) 6660 *f++ = 'x'; 6661 *f = '\0'; 6662 6663 fprintf (asm_out_file, "\t.section\t%s,\"%s\"\n", name, flagchars); 6664 } 6665 6666 void 6667 default_pe_asm_named_section (const char *name, unsigned int flags, 6668 tree decl) 6669 { 6670 default_coff_asm_named_section (name, flags, decl); 6671 6672 if (flags & SECTION_LINKONCE) 6673 { 6674 /* Functions may have been compiled at various levels of 6675 optimization so we can't use `same_size' here. 6676 Instead, have the linker pick one. */ 6677 fprintf (asm_out_file, "\t.linkonce %s\n", 6678 (flags & SECTION_CODE ? "discard" : "same_size")); 6679 } 6680 } 6681 6682 /* The lame default section selector. */ 6683 6684 section * 6685 default_select_section (tree decl, int reloc, 6686 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED) 6687 { 6688 if (DECL_P (decl)) 6689 { 6690 if (decl_readonly_section (decl, reloc)) 6691 return readonly_data_section; 6692 } 6693 else if (TREE_CODE (decl) == CONSTRUCTOR) 6694 { 6695 if (! ((flag_pic && reloc) 6696 || !TREE_READONLY (decl) 6697 || TREE_SIDE_EFFECTS (decl) 6698 || !TREE_CONSTANT (decl))) 6699 return readonly_data_section; 6700 } 6701 else if (TREE_CODE (decl) == STRING_CST) 6702 return readonly_data_section; 6703 else if (! (flag_pic && reloc)) 6704 return readonly_data_section; 6705 6706 return data_section; 6707 } 6708 6709 enum section_category 6710 categorize_decl_for_section (const_tree decl, int reloc) 6711 { 6712 enum section_category ret; 6713 6714 if (TREE_CODE (decl) == FUNCTION_DECL) 6715 return SECCAT_TEXT; 6716 else if (TREE_CODE (decl) == STRING_CST) 6717 { 6718 if ((flag_sanitize & SANITIZE_ADDRESS) 6719 && asan_protect_global (CONST_CAST_TREE (decl))) 6720 /* or !flag_merge_constants */ 6721 return SECCAT_RODATA; 6722 else 6723 return SECCAT_RODATA_MERGE_STR; 6724 } 6725 else if (VAR_P (decl)) 6726 { 6727 tree d = CONST_CAST_TREE (decl); 6728 if (bss_initializer_p (decl)) 6729 ret = SECCAT_BSS; 6730 else if (! TREE_READONLY (decl) 6731 || TREE_SIDE_EFFECTS (decl) 6732 || (DECL_INITIAL (decl) 6733 && ! TREE_CONSTANT (DECL_INITIAL (decl)))) 6734 { 6735 /* Here the reloc_rw_mask is not testing whether the section should 6736 be read-only or not, but whether the dynamic link will have to 6737 do something. If so, we wish to segregate the data in order to 6738 minimize cache misses inside the dynamic linker. */ 6739 if (reloc & targetm.asm_out.reloc_rw_mask ()) 6740 ret = reloc == 1 ? SECCAT_DATA_REL_LOCAL : SECCAT_DATA_REL; 6741 else 6742 ret = SECCAT_DATA; 6743 } 6744 else if (reloc & targetm.asm_out.reloc_rw_mask ()) 6745 ret = reloc == 1 ? 
SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO; 6746 else if (reloc || flag_merge_constants < 2 6747 || ((flag_sanitize & SANITIZE_ADDRESS) 6748 /* PR 81697: for architectures that use section anchors we 6749 need to ignore DECL_RTL_SET_P (decl) for string constants 6750 inside this asan_protect_global call because otherwise 6751 we'll wrongly put them into SECCAT_RODATA_MERGE_CONST 6752 section, set DECL_RTL (decl) later on and add DECL to 6753 protected globals via successive asan_protect_global 6754 calls. In this scenario we'll end up with wrong 6755 alignment of these strings at runtime and possible ASan 6756 false positives. */ 6757 && asan_protect_global (d, use_object_blocks_p () 6758 && use_blocks_for_decl_p (d)))) 6759 /* C and C++ don't allow different variables to share the same 6760 location. -fmerge-all-constants allows even that (at the 6761 expense of not conforming). */ 6762 ret = SECCAT_RODATA; 6763 else if (DECL_INITIAL (decl) 6764 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST) 6765 ret = SECCAT_RODATA_MERGE_STR_INIT; 6766 else 6767 ret = SECCAT_RODATA_MERGE_CONST; 6768 } 6769 else if (TREE_CODE (decl) == CONSTRUCTOR) 6770 { 6771 if ((reloc & targetm.asm_out.reloc_rw_mask ()) 6772 || TREE_SIDE_EFFECTS (decl) 6773 || ! TREE_CONSTANT (decl)) 6774 ret = SECCAT_DATA; 6775 else 6776 ret = SECCAT_RODATA; 6777 } 6778 else 6779 ret = SECCAT_RODATA; 6780 6781 /* There are no read-only thread-local sections. */ 6782 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl)) 6783 { 6784 /* Note that this would be *just* SECCAT_BSS, except that there's 6785 no concept of a read-only thread-local-data section. */ 6786 if (ret == SECCAT_BSS 6787 || DECL_INITIAL (decl) == NULL 6788 || (flag_zero_initialized_in_bss 6789 && initializer_zerop (DECL_INITIAL (decl)))) 6790 ret = SECCAT_TBSS; 6791 else 6792 ret = SECCAT_TDATA; 6793 } 6794 6795 /* If the target uses small data sections, select it. */ 6796 else if (targetm.in_small_data_p (decl)) 6797 { 6798 if (ret == SECCAT_BSS) 6799 ret = SECCAT_SBSS; 6800 else if (targetm.have_srodata_section && ret == SECCAT_RODATA) 6801 ret = SECCAT_SRODATA; 6802 else 6803 ret = SECCAT_SDATA; 6804 } 6805 6806 return ret; 6807 } 6808 6809 static bool 6810 decl_readonly_section_1 (enum section_category category) 6811 { 6812 switch (category) 6813 { 6814 case SECCAT_RODATA: 6815 case SECCAT_RODATA_MERGE_STR: 6816 case SECCAT_RODATA_MERGE_STR_INIT: 6817 case SECCAT_RODATA_MERGE_CONST: 6818 case SECCAT_SRODATA: 6819 return true; 6820 default: 6821 return false; 6822 } 6823 } 6824 6825 bool 6826 decl_readonly_section (const_tree decl, int reloc) 6827 { 6828 return decl_readonly_section_1 (categorize_decl_for_section (decl, reloc)); 6829 } 6830 6831 /* Select a section based on the above categorization. */ 6832 6833 section * 6834 default_elf_select_section (tree decl, int reloc, 6835 unsigned HOST_WIDE_INT align) 6836 { 6837 const char *sname; 6838 6839 switch (categorize_decl_for_section (decl, reloc)) 6840 { 6841 case SECCAT_TEXT: 6842 /* We're not supposed to be called on FUNCTION_DECLs. 
*/ 6843 gcc_unreachable (); 6844 case SECCAT_RODATA: 6845 return readonly_data_section; 6846 case SECCAT_RODATA_MERGE_STR: 6847 return mergeable_string_section (decl, align, 0); 6848 case SECCAT_RODATA_MERGE_STR_INIT: 6849 return mergeable_string_section (DECL_INITIAL (decl), align, 0); 6850 case SECCAT_RODATA_MERGE_CONST: 6851 return mergeable_constant_section (DECL_MODE (decl), align, 0); 6852 case SECCAT_SRODATA: 6853 sname = ".sdata2"; 6854 break; 6855 case SECCAT_DATA: 6856 return data_section; 6857 case SECCAT_DATA_REL: 6858 sname = ".data.rel"; 6859 break; 6860 case SECCAT_DATA_REL_LOCAL: 6861 sname = ".data.rel.local"; 6862 break; 6863 case SECCAT_DATA_REL_RO: 6864 sname = ".data.rel.ro"; 6865 break; 6866 case SECCAT_DATA_REL_RO_LOCAL: 6867 sname = ".data.rel.ro.local"; 6868 break; 6869 case SECCAT_SDATA: 6870 sname = ".sdata"; 6871 break; 6872 case SECCAT_TDATA: 6873 sname = ".tdata"; 6874 break; 6875 case SECCAT_BSS: 6876 if (DECL_P (decl) 6877 && lookup_attribute ("noinit", DECL_ATTRIBUTES (decl)) != NULL_TREE) 6878 { 6879 sname = ".noinit"; 6880 break; 6881 } 6882 6883 if (bss_section) 6884 return bss_section; 6885 sname = ".bss"; 6886 break; 6887 case SECCAT_SBSS: 6888 sname = ".sbss"; 6889 break; 6890 case SECCAT_TBSS: 6891 sname = ".tbss"; 6892 break; 6893 default: 6894 gcc_unreachable (); 6895 } 6896 6897 return get_named_section (decl, sname, reloc); 6898 } 6899 6900 /* Construct a unique section name based on the decl name and the 6901 categorization performed above. */ 6902 6903 void 6904 default_unique_section (tree decl, int reloc) 6905 { 6906 /* We only need to use .gnu.linkonce if we don't have COMDAT groups. */ 6907 bool one_only = DECL_ONE_ONLY (decl) && !HAVE_COMDAT_GROUP; 6908 const char *prefix, *name, *linkonce; 6909 char *string; 6910 tree id; 6911 6912 switch (categorize_decl_for_section (decl, reloc)) 6913 { 6914 case SECCAT_TEXT: 6915 prefix = one_only ? ".t" : ".text"; 6916 break; 6917 case SECCAT_RODATA: 6918 case SECCAT_RODATA_MERGE_STR: 6919 case SECCAT_RODATA_MERGE_STR_INIT: 6920 case SECCAT_RODATA_MERGE_CONST: 6921 prefix = one_only ? ".r" : ".rodata"; 6922 break; 6923 case SECCAT_SRODATA: 6924 prefix = one_only ? ".s2" : ".sdata2"; 6925 break; 6926 case SECCAT_DATA: 6927 prefix = one_only ? ".d" : ".data"; 6928 break; 6929 case SECCAT_DATA_REL: 6930 prefix = one_only ? ".d.rel" : ".data.rel"; 6931 break; 6932 case SECCAT_DATA_REL_LOCAL: 6933 prefix = one_only ? ".d.rel.local" : ".data.rel.local"; 6934 break; 6935 case SECCAT_DATA_REL_RO: 6936 prefix = one_only ? ".d.rel.ro" : ".data.rel.ro"; 6937 break; 6938 case SECCAT_DATA_REL_RO_LOCAL: 6939 prefix = one_only ? ".d.rel.ro.local" : ".data.rel.ro.local"; 6940 break; 6941 case SECCAT_SDATA: 6942 prefix = one_only ? ".s" : ".sdata"; 6943 break; 6944 case SECCAT_BSS: 6945 prefix = one_only ? ".b" : ".bss"; 6946 break; 6947 case SECCAT_SBSS: 6948 prefix = one_only ? ".sb" : ".sbss"; 6949 break; 6950 case SECCAT_TDATA: 6951 prefix = one_only ? ".td" : ".tdata"; 6952 break; 6953 case SECCAT_TBSS: 6954 prefix = one_only ? ".tb" : ".tbss"; 6955 break; 6956 default: 6957 gcc_unreachable (); 6958 } 6959 6960 id = DECL_ASSEMBLER_NAME (decl); 6961 ultimate_transparent_alias_target (&id); 6962 name = IDENTIFIER_POINTER (id); 6963 name = targetm.strip_name_encoding (name); 6964 6965 /* If we're using one_only, then there needs to be a .gnu.linkonce 6966 prefix to the section name. */ 6967 linkonce = one_only ? 
".gnu.linkonce" : ""; 6968 6969 string = ACONCAT ((linkonce, prefix, ".", name, NULL)); 6970 6971 set_decl_section_name (decl, string); 6972 } 6973 6974 /* Subroutine of compute_reloc_for_rtx for leaf rtxes. */ 6975 6976 static int 6977 compute_reloc_for_rtx_1 (const_rtx x) 6978 { 6979 switch (GET_CODE (x)) 6980 { 6981 case SYMBOL_REF: 6982 return SYMBOL_REF_LOCAL_P (x) ? 1 : 2; 6983 case LABEL_REF: 6984 return 1; 6985 default: 6986 return 0; 6987 } 6988 } 6989 6990 /* Like compute_reloc_for_constant, except for an RTX. The return value 6991 is a mask for which bit 1 indicates a global relocation, and bit 0 6992 indicates a local relocation. */ 6993 6994 static int 6995 compute_reloc_for_rtx (const_rtx x) 6996 { 6997 switch (GET_CODE (x)) 6998 { 6999 case SYMBOL_REF: 7000 case LABEL_REF: 7001 return compute_reloc_for_rtx_1 (x); 7002 7003 case CONST: 7004 { 7005 int reloc = 0; 7006 subrtx_iterator::array_type array; 7007 FOR_EACH_SUBRTX (iter, array, x, ALL) 7008 reloc |= compute_reloc_for_rtx_1 (*iter); 7009 return reloc; 7010 } 7011 7012 default: 7013 return 0; 7014 } 7015 } 7016 7017 section * 7018 default_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED, 7019 rtx x, 7020 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED) 7021 { 7022 if (compute_reloc_for_rtx (x) & targetm.asm_out.reloc_rw_mask ()) 7023 return data_section; 7024 else 7025 return readonly_data_section; 7026 } 7027 7028 section * 7029 default_elf_select_rtx_section (machine_mode mode, rtx x, 7030 unsigned HOST_WIDE_INT align) 7031 { 7032 int reloc = compute_reloc_for_rtx (x); 7033 7034 /* ??? Handle small data here somehow. */ 7035 7036 if (reloc & targetm.asm_out.reloc_rw_mask ()) 7037 { 7038 if (reloc == 1) 7039 return get_named_section (NULL, ".data.rel.ro.local", 1); 7040 else 7041 return get_named_section (NULL, ".data.rel.ro", 3); 7042 } 7043 7044 return mergeable_constant_section (mode, align, 0); 7045 } 7046 7047 /* Set the generally applicable flags on the SYMBOL_REF for EXP. */ 7048 7049 void 7050 default_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED) 7051 { 7052 rtx symbol; 7053 int flags; 7054 7055 /* Careful not to prod global register variables. */ 7056 if (!MEM_P (rtl)) 7057 return; 7058 symbol = XEXP (rtl, 0); 7059 if (GET_CODE (symbol) != SYMBOL_REF) 7060 return; 7061 7062 flags = SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_HAS_BLOCK_INFO; 7063 if (TREE_CODE (decl) == FUNCTION_DECL) 7064 flags |= SYMBOL_FLAG_FUNCTION; 7065 if (targetm.binds_local_p (decl)) 7066 flags |= SYMBOL_FLAG_LOCAL; 7067 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl)) 7068 flags |= DECL_TLS_MODEL (decl) << SYMBOL_FLAG_TLS_SHIFT; 7069 else if (targetm.in_small_data_p (decl)) 7070 flags |= SYMBOL_FLAG_SMALL; 7071 /* ??? Why is DECL_EXTERNAL ever set for non-PUBLIC names? Without 7072 being PUBLIC, the thing *must* be defined in this translation unit. 7073 Prevent this buglet from being propagated into rtl code as well. */ 7074 if (DECL_P (decl) && DECL_EXTERNAL (decl) && TREE_PUBLIC (decl)) 7075 flags |= SYMBOL_FLAG_EXTERNAL; 7076 7077 SYMBOL_REF_FLAGS (symbol) = flags; 7078 } 7079 7080 /* By default, we do nothing for encode_section_info, so we need not 7081 do anything but discard the '*' marker. */ 7082 7083 const char * 7084 default_strip_name_encoding (const char *str) 7085 { 7086 return str + (*str == '*'); 7087 } 7088 7089 #ifdef ASM_OUTPUT_DEF 7090 /* The default implementation of TARGET_ASM_OUTPUT_ANCHOR. Define the 7091 anchor relative to ".", the current section position. 
*/ 7092 7093 void 7094 default_asm_output_anchor (rtx symbol) 7095 { 7096 char buffer[100]; 7097 7098 sprintf (buffer, "*. + " HOST_WIDE_INT_PRINT_DEC, 7099 SYMBOL_REF_BLOCK_OFFSET (symbol)); 7100 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer); 7101 } 7102 #endif 7103 7104 /* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P. */ 7105 7106 bool 7107 default_use_anchors_for_symbol_p (const_rtx symbol) 7108 { 7109 tree decl; 7110 section *sect = SYMBOL_REF_BLOCK (symbol)->sect; 7111 7112 /* This function should only be called with non-zero SYMBOL_REF_BLOCK, 7113 furthermore get_block_for_section should not create object blocks 7114 for mergeable sections. */ 7115 gcc_checking_assert (sect && !(sect->common.flags & SECTION_MERGE)); 7116 7117 /* Don't use anchors for small data sections. The small data register 7118 acts as an anchor for such sections. */ 7119 if (sect->common.flags & SECTION_SMALL) 7120 return false; 7121 7122 decl = SYMBOL_REF_DECL (symbol); 7123 if (decl && DECL_P (decl)) 7124 { 7125 /* Don't use section anchors for decls that might be defined or 7126 usurped by other modules. */ 7127 if (TREE_PUBLIC (decl) && !decl_binds_to_current_def_p (decl)) 7128 return false; 7129 7130 /* Don't use section anchors for decls that will be placed in a 7131 small data section. */ 7132 /* ??? Ideally, this check would be redundant with the SECTION_SMALL 7133 one above. The problem is that we only use SECTION_SMALL for 7134 sections that should be marked as small in the section directive. */ 7135 if (targetm.in_small_data_p (decl)) 7136 return false; 7137 7138 /* Don't use section anchors for decls that won't fit inside a single 7139 anchor range to reduce the amount of instructions required to refer 7140 to the entire declaration. */ 7141 if (DECL_SIZE_UNIT (decl) == NULL_TREE 7142 || !tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)) 7143 || (tree_to_uhwi (DECL_SIZE_UNIT (decl)) 7144 >= (unsigned HOST_WIDE_INT) targetm.max_anchor_offset)) 7145 return false; 7146 7147 } 7148 return true; 7149 } 7150 7151 /* Return true when RESOLUTION indicate that symbol will be bound to the 7152 definition provided by current .o file. */ 7153 7154 static bool 7155 resolution_to_local_definition_p (enum ld_plugin_symbol_resolution resolution) 7156 { 7157 return (resolution == LDPR_PREVAILING_DEF 7158 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP 7159 || resolution == LDPR_PREVAILING_DEF_IRONLY); 7160 } 7161 7162 /* Return true when RESOLUTION indicate that symbol will be bound locally 7163 within current executable or DSO. */ 7164 7165 static bool 7166 resolution_local_p (enum ld_plugin_symbol_resolution resolution) 7167 { 7168 return (resolution == LDPR_PREVAILING_DEF 7169 || resolution == LDPR_PREVAILING_DEF_IRONLY 7170 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP 7171 || resolution == LDPR_PREEMPTED_REG 7172 || resolution == LDPR_PREEMPTED_IR 7173 || resolution == LDPR_RESOLVED_IR 7174 || resolution == LDPR_RESOLVED_EXEC); 7175 } 7176 7177 /* COMMON_LOCAL_P is true means that the linker can guarantee that an 7178 uninitialized common symbol in the executable will still be defined 7179 (through COPY relocation) in the executable. */ 7180 7181 bool 7182 default_binds_local_p_3 (const_tree exp, bool shlib, bool weak_dominate, 7183 bool extern_protected_data, bool common_local_p) 7184 { 7185 /* A non-decl is an entry in the constant pool. 
*/
7186 if (!DECL_P (exp))
7187 return true;
7188
7189 /* Weakrefs may not bind locally, even though the weakref itself is always
7190 static and therefore local. Similarly, the resolver for ifunc functions
7191 might resolve to a non-local function.
7192 FIXME: We can resolve the weakref case more carefully by looking at the
7193 weakref alias. */
7194 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (exp))
7195 || (TREE_CODE (exp) == FUNCTION_DECL
7196 && cgraph_node::get (exp)
7197 && cgraph_node::get (exp)->ifunc_resolver))
7198 return false;
7199
7200 /* Static variables are always local. */
7201 if (! TREE_PUBLIC (exp))
7202 return true;
7203
7204 /* With a resolution file in hand, take a look at the resolutions.
7205 We can't just return true for resolved_locally symbols,
7206 because dynamic linking might overwrite symbols
7207 in shared libraries. */
7208 bool resolved_locally = false;
7209
7210 bool uninited_common = (DECL_COMMON (exp)
7211 && (DECL_INITIAL (exp) == NULL
7212 || (!in_lto_p
7213 && DECL_INITIAL (exp) == error_mark_node)));
7214
7215 /* A non-external variable is defined locally only if it isn't an
7216 uninitialized COMMON variable or common_local_p is true. */
7217 bool defined_locally = (!DECL_EXTERNAL (exp)
7218 && (!uninited_common || common_local_p));
7219 if (symtab_node *node = symtab_node::get (exp))
7220 {
7221 if (node->in_other_partition)
7222 defined_locally = true;
7223 if (node->can_be_discarded_p ())
7224 ;
7225 else if (resolution_to_local_definition_p (node->resolution))
7226 defined_locally = resolved_locally = true;
7227 else if (resolution_local_p (node->resolution))
7228 resolved_locally = true;
7229 }
7230 if (defined_locally && weak_dominate && !shlib)
7231 resolved_locally = true;
7232
7233 /* Undefined weak symbols are never defined locally. */
7234 if (DECL_WEAK (exp) && !defined_locally)
7235 return false;
7236
7237 /* A symbol is local if the user has said explicitly that it will be,
7238 or if we have a definition for the symbol. We cannot infer visibility
7239 for undefined symbols. */
7240 if (DECL_VISIBILITY (exp) != VISIBILITY_DEFAULT
7241 && (TREE_CODE (exp) == FUNCTION_DECL
7242 || !extern_protected_data
7243 || DECL_VISIBILITY (exp) != VISIBILITY_PROTECTED)
7244 && (DECL_VISIBILITY_SPECIFIED (exp) || defined_locally))
7245 return true;
7246
7247 /* If PIC, then assume that any global name can be overridden by
7248 symbols resolved from other modules. */
7249 if (shlib)
7250 return false;
7251
7252 /* Variables defined outside this object might not be local. */
7253 if (DECL_EXTERNAL (exp) && !resolved_locally)
7254 return false;
7255
7256 /* Non-dominant weak symbols are not defined locally. */
7257 if (DECL_WEAK (exp) && !resolved_locally)
7258 return false;
7259
7260 /* An uninitialized COMMON variable may be unified with symbols
7261 resolved from other modules. */
7262 if (uninited_common && !resolved_locally)
7263 return false;
7264
7265 /* Otherwise we're left with initialized (or non-common) global data
7266 which is of necessity defined locally. */
7267 return true;
7268 }
7269
7270 /* Assume ELF-ish defaults, since that's pretty much the most liberal
7271 wrt cross-module name binding. */
7272
7273 bool
7274 default_binds_local_p (const_tree exp)
7275 {
7276 return default_binds_local_p_3 (exp, flag_shlib != 0, true, false, false);
7277 }
7278
7279 /* Similar to default_binds_local_p, but a common symbol may be local and
7280 extern protected data is non-local.
*/
7281
7282 bool
7283 default_binds_local_p_2 (const_tree exp)
7284 {
7285 return default_binds_local_p_3 (exp, flag_shlib != 0, true, true,
7286 !flag_pic);
7287 }
7288
7289 bool
7290 default_binds_local_p_1 (const_tree exp, int shlib)
7291 {
7292 return default_binds_local_p_3 (exp, shlib != 0, false, false, false);
7293 }
7294
7295 /* Return true when references to DECL must bind to the current definition
7296 in the final executable.
7297
7298 The condition is usually equivalent to whether the function binds to the
7299 current module (shared library or executable), that is to binds_local_p.
7300 We use this fact to avoid the need for another target hook and implement
7301 the logic using binds_local_p, special-casing only the places where
7302 decl_binds_to_current_def_p is stronger than binds_local_p. In particular
7303 these are the weak definitions (which can be overwritten at link time by
7304 another definition from a different object file); when resolution info is
7305 available we simply use the knowledge passed to us by the linker plugin. */
7306 bool
7307 decl_binds_to_current_def_p (const_tree decl)
7308 {
7309 gcc_assert (DECL_P (decl));
7310 if (!targetm.binds_local_p (decl))
7311 return false;
7312 if (!TREE_PUBLIC (decl))
7313 return true;
7314
7315 /* When resolution is available, just use it. */
7316 if (symtab_node *node = symtab_node::get (decl))
7317 {
7318 if (node->resolution != LDPR_UNKNOWN
7319 && !node->can_be_discarded_p ())
7320 return resolution_to_local_definition_p (node->resolution);
7321 }
7322
7323 /* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks
7324 bind locally but can still be overwritten), DECL_COMMON (can be merged
7325 with a non-common definition somewhere in the same module) or
7326 DECL_EXTERNAL.
7327 This relies on the fact that binds_local_p behaves like decl_replaceable_p
7328 for all other declaration types. */
7329 if (DECL_WEAK (decl))
7330 return false;
7331 if (DECL_COMMON (decl)
7332 && (DECL_INITIAL (decl) == NULL
7333 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
7334 return false;
7335 if (DECL_EXTERNAL (decl))
7336 return false;
7337 return true;
7338 }
7339
7340 /* A replaceable function or variable is one which may be replaced
7341 at link-time with an entirely different definition, provided that the
7342 replacement has the same type. For example, functions declared
7343 with __attribute__((weak)) on most systems are replaceable.
7344
7345 COMDAT functions are not replaceable, since all definitions of the
7346 function must be equivalent. It is important that COMDAT functions
7347 not be treated as replaceable so that use of C++ template
7348 instantiations is not penalized. */
7349
7350 bool
7351 decl_replaceable_p (tree decl)
7352 {
7353 gcc_assert (DECL_P (decl));
7354 if (!TREE_PUBLIC (decl) || DECL_COMDAT (decl))
7355 return false;
7356 if (!flag_semantic_interposition
7357 && !DECL_WEAK (decl))
7358 return false;
7359 return !decl_binds_to_current_def_p (decl);
7360 }
7361
7362 /* Default function to output code that will globalize a label. A
7363 target must define GLOBAL_ASM_OP or provide its own function to
7364 globalize a label. */
7365 #ifdef GLOBAL_ASM_OP
7366 void
7367 default_globalize_label (FILE * stream, const char *name)
7368 {
7369 fputs (GLOBAL_ASM_OP, stream);
7370 assemble_name (stream, name);
7371 putc ('\n', stream);
7372 }
7373 #endif /* GLOBAL_ASM_OP */
7374
7375 /* Default function to output code that will globalize a declaration.
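   With a typical GLOBAL_ASM_OP of "\t.globl\t" this simply prints
     .globl  name
   for the decl's assembler name.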
*/ 7376 void 7377 default_globalize_decl_name (FILE * stream, tree decl) 7378 { 7379 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0); 7380 targetm.asm_out.globalize_label (stream, name); 7381 } 7382 7383 /* Default function to output a label for unwind information. The 7384 default is to do nothing. A target that needs nonlocal labels for 7385 unwind information must provide its own function to do this. */ 7386 void 7387 default_emit_unwind_label (FILE * stream ATTRIBUTE_UNUSED, 7388 tree decl ATTRIBUTE_UNUSED, 7389 int for_eh ATTRIBUTE_UNUSED, 7390 int empty ATTRIBUTE_UNUSED) 7391 { 7392 } 7393 7394 /* Default function to output a label to divide up the exception table. 7395 The default is to do nothing. A target that needs/wants to divide 7396 up the table must provide its own function to do this. */ 7397 void 7398 default_emit_except_table_label (FILE * stream ATTRIBUTE_UNUSED) 7399 { 7400 } 7401 7402 /* This is how to output an internal numbered label where PREFIX is 7403 the class of label and LABELNO is the number within the class. */ 7404 7405 void 7406 default_generate_internal_label (char *buf, const char *prefix, 7407 unsigned long labelno) 7408 { 7409 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno); 7410 } 7411 7412 /* This is how to output an internal numbered label where PREFIX is 7413 the class of label and LABELNO is the number within the class. */ 7414 7415 void 7416 default_internal_label (FILE *stream, const char *prefix, 7417 unsigned long labelno) 7418 { 7419 char *const buf = (char *) alloca (40 + strlen (prefix)); 7420 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno); 7421 ASM_OUTPUT_INTERNAL_LABEL (stream, buf); 7422 } 7423 7424 7425 /* The default implementation of ASM_DECLARE_CONSTANT_NAME. */ 7426 7427 void 7428 default_asm_declare_constant_name (FILE *file, const char *name, 7429 const_tree exp ATTRIBUTE_UNUSED, 7430 HOST_WIDE_INT size ATTRIBUTE_UNUSED) 7431 { 7432 assemble_label (file, name); 7433 } 7434 7435 /* This is the default behavior at the beginning of a file. It's 7436 controlled by two other target-hook toggles. */ 7437 void 7438 default_file_start (void) 7439 { 7440 if (targetm.asm_file_start_app_off 7441 && !(flag_verbose_asm || flag_debug_asm || flag_dump_rtl_in_asm)) 7442 fputs (ASM_APP_OFF, asm_out_file); 7443 7444 if (targetm.asm_file_start_file_directive) 7445 { 7446 /* LTO-produced units have no meaningful main_input_filename. */ 7447 if (in_lto_p) 7448 output_file_directive (asm_out_file, "<artificial>"); 7449 else 7450 output_file_directive (asm_out_file, main_input_filename); 7451 } 7452 } 7453 7454 /* This is a generic routine suitable for use as TARGET_ASM_FILE_END 7455 which emits a special section directive used to indicate whether or 7456 not this object file needs an executable stack. This is primarily 7457 a GNU extension to ELF but could be used on other targets. */ 7458 7459 int trampolines_created; 7460 7461 void 7462 file_end_indicate_exec_stack (void) 7463 { 7464 unsigned int flags = SECTION_DEBUG; 7465 if (trampolines_created) 7466 flags |= SECTION_CODE; 7467 7468 switch_to_section (get_section (".note.GNU-stack", flags, NULL)); 7469 } 7470 7471 /* Emit a special section directive to indicate that this object file 7472 was compiled with -fsplit-stack. This is used to let the linker 7473 detect calls between split-stack code and non-split-stack code, so 7474 that it can modify the split-stack code to allocate a sufficiently 7475 large stack.
We emit another special section if there are any 7476 functions in this file which have the no_split_stack attribute, to 7477 prevent the linker from warning about being unable to convert the 7478 functions if they call non-split-stack code. */ 7479 7480 void 7481 file_end_indicate_split_stack (void) 7482 { 7483 if (flag_split_stack) 7484 { 7485 switch_to_section (get_section (".note.GNU-split-stack", SECTION_DEBUG, 7486 NULL)); 7487 if (saw_no_split_stack) 7488 switch_to_section (get_section (".note.GNU-no-split-stack", 7489 SECTION_DEBUG, NULL)); 7490 } 7491 } 7492 7493 /* Output DIRECTIVE (a C string) followed by a newline. This is used as 7494 a get_unnamed_section callback. */ 7495 7496 void 7497 output_section_asm_op (const void *directive) 7498 { 7499 fprintf (asm_out_file, "%s\n", (const char *) directive); 7500 } 7501 7502 /* Emit assembly code to switch to section NEW_SECTION. Do nothing if 7503 the current section is NEW_SECTION. */ 7504 7505 void 7506 switch_to_section (section *new_section) 7507 { 7508 if (in_section == new_section) 7509 return; 7510 7511 if (new_section->common.flags & SECTION_FORGET) 7512 in_section = NULL; 7513 else 7514 in_section = new_section; 7515 7516 switch (SECTION_STYLE (new_section)) 7517 { 7518 case SECTION_NAMED: 7519 targetm.asm_out.named_section (new_section->named.name, 7520 new_section->named.common.flags, 7521 new_section->named.decl); 7522 break; 7523 7524 case SECTION_UNNAMED: 7525 new_section->unnamed.callback (new_section->unnamed.data); 7526 break; 7527 7528 case SECTION_NOSWITCH: 7529 gcc_unreachable (); 7530 break; 7531 } 7532 7533 new_section->common.flags |= SECTION_DECLARED; 7534 } 7535 7536 /* If block symbol SYMBOL has not yet been assigned an offset, place 7537 it at the end of its block. */ 7538 7539 void 7540 place_block_symbol (rtx symbol) 7541 { 7542 unsigned HOST_WIDE_INT size, mask, offset; 7543 class constant_descriptor_rtx *desc; 7544 unsigned int alignment; 7545 struct object_block *block; 7546 tree decl; 7547 7548 gcc_assert (SYMBOL_REF_BLOCK (symbol)); 7549 if (SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0) 7550 return; 7551 7552 /* Work out the symbol's size and alignment. 
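Three cases are handled below: an RTL constant-pool entry takes both from its descriptor; a tree constant-pool entry takes them from the underlying DECL, adding an AddressSanitizer red zone for protected string constants; and an ordinary variable takes them from the DECL as well, again with a red zone when ASAN protects it. An alias is not laid out itself; it simply reuses the block offset of its ultimate target.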
*/ 7553 if (CONSTANT_POOL_ADDRESS_P (symbol)) 7554 { 7555 desc = SYMBOL_REF_CONSTANT (symbol); 7556 alignment = desc->align; 7557 size = GET_MODE_SIZE (desc->mode); 7558 } 7559 else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol)) 7560 { 7561 decl = SYMBOL_REF_DECL (symbol); 7562 gcc_checking_assert (DECL_IN_CONSTANT_POOL (decl)); 7563 alignment = DECL_ALIGN (decl); 7564 size = get_constant_size (DECL_INITIAL (decl)); 7565 if ((flag_sanitize & SANITIZE_ADDRESS) 7566 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST 7567 && asan_protect_global (DECL_INITIAL (decl))) 7568 { 7569 size += asan_red_zone_size (size); 7570 alignment = MAX (alignment, 7571 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT); 7572 } 7573 } 7574 else 7575 { 7576 struct symtab_node *snode; 7577 decl = SYMBOL_REF_DECL (symbol); 7578 7579 snode = symtab_node::get (decl); 7580 if (snode->alias) 7581 { 7582 rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl); 7583 7584 gcc_assert (MEM_P (target) 7585 && GET_CODE (XEXP (target, 0)) == SYMBOL_REF 7586 && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (target, 0))); 7587 target = XEXP (target, 0); 7588 place_block_symbol (target); 7589 SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target); 7590 return; 7591 } 7592 alignment = get_variable_align (decl); 7593 size = tree_to_uhwi (DECL_SIZE_UNIT (decl)); 7594 if ((flag_sanitize & SANITIZE_ADDRESS) 7595 && asan_protect_global (decl)) 7596 { 7597 size += asan_red_zone_size (size); 7598 alignment = MAX (alignment, 7599 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT); 7600 } 7601 } 7602 7603 /* Calculate the object's offset from the start of the block. */ 7604 block = SYMBOL_REF_BLOCK (symbol); 7605 mask = alignment / BITS_PER_UNIT - 1; 7606 offset = (block->size + mask) & ~mask; 7607 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset; 7608 7609 /* Record the block's new alignment and size. */ 7610 block->alignment = MAX (block->alignment, alignment); 7611 block->size = offset + size; 7612 7613 vec_safe_push (block->objects, symbol); 7614 } 7615 7616 /* Return the anchor that should be used to address byte offset OFFSET 7617 from the first object in BLOCK. MODEL is the TLS model used 7618 to access it. */ 7619 7620 rtx 7621 get_section_anchor (struct object_block *block, HOST_WIDE_INT offset, 7622 enum tls_model model) 7623 { 7624 char label[100]; 7625 unsigned int begin, middle, end; 7626 unsigned HOST_WIDE_INT min_offset, max_offset, range, bias, delta; 7627 rtx anchor; 7628 7629 /* Work out the anchor's offset. Use an offset of 0 for the first 7630 anchor so that we don't pessimize the case where we take the address 7631 of a variable at the beginning of the block. This is particularly 7632 useful when a block has only one variable assigned to it. 7633 7634 We try to place anchors RANGE bytes apart, so there can then be 7635 anchors at +/-RANGE, +/-2 * RANGE, and so on, up to the limits of 7636 a ptr_mode offset. With some target settings, the lowest such 7637 anchor might be out of range for the lowest ptr_mode offset; 7638 likewise the highest anchor for the highest offset. Use anchors 7639 at the extreme ends of the ptr_mode range in such cases. 7640 7641 All arithmetic uses unsigned integers in order to avoid 7642 signed overflow. 
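As a purely illustrative example (the numbers are made up rather than taken from any target): with min_anchor_offset == -32768 and max_anchor_offset == 32767, RANGE is 65536; an object at byte offset 70000 gives DELTA = 70000 - (-32768) = 102768, which rounds down to 65536, so the anchor is placed at block offset 65536 and the object is addressed as anchor + 4464, well inside the allowed [-32768, 32767] window.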
*/ 7643 max_offset = (unsigned HOST_WIDE_INT) targetm.max_anchor_offset; 7644 min_offset = (unsigned HOST_WIDE_INT) targetm.min_anchor_offset; 7645 range = max_offset - min_offset + 1; 7646 if (range == 0) 7647 offset = 0; 7648 else 7649 { 7650 bias = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (ptr_mode) - 1); 7651 if (offset < 0) 7652 { 7653 delta = -(unsigned HOST_WIDE_INT) offset + max_offset; 7654 delta -= delta % range; 7655 if (delta > bias) 7656 delta = bias; 7657 offset = (HOST_WIDE_INT) (-delta); 7658 } 7659 else 7660 { 7661 delta = (unsigned HOST_WIDE_INT) offset - min_offset; 7662 delta -= delta % range; 7663 if (delta > bias - 1) 7664 delta = bias - 1; 7665 offset = (HOST_WIDE_INT) delta; 7666 } 7667 } 7668 7669 /* Do a binary search to see if there's already an anchor we can use. 7670 Set BEGIN to the new anchor's index if not. */ 7671 begin = 0; 7672 end = vec_safe_length (block->anchors); 7673 while (begin != end) 7674 { 7675 middle = (end + begin) / 2; 7676 anchor = (*block->anchors)[middle]; 7677 if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset) 7678 end = middle; 7679 else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset) 7680 begin = middle + 1; 7681 else if (SYMBOL_REF_TLS_MODEL (anchor) > model) 7682 end = middle; 7683 else if (SYMBOL_REF_TLS_MODEL (anchor) < model) 7684 begin = middle + 1; 7685 else 7686 return anchor; 7687 } 7688 7689 /* Create a new anchor with a unique label. */ 7690 ASM_GENERATE_INTERNAL_LABEL (label, "LANCHOR", anchor_labelno++); 7691 anchor = create_block_symbol (ggc_strdup (label), block, offset); 7692 SYMBOL_REF_FLAGS (anchor) |= SYMBOL_FLAG_LOCAL | SYMBOL_FLAG_ANCHOR; 7693 SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT; 7694 7695 /* Insert it at index BEGIN. */ 7696 vec_safe_insert (block->anchors, begin, anchor); 7697 return anchor; 7698 } 7699 7700 /* Output the objects in BLOCK. */ 7701 7702 static void 7703 output_object_block (struct object_block *block) 7704 { 7705 class constant_descriptor_rtx *desc; 7706 unsigned int i; 7707 HOST_WIDE_INT offset; 7708 tree decl; 7709 rtx symbol; 7710 7711 if (!block->objects) 7712 return; 7713 7714 /* Switch to the section and make sure that the first byte is 7715 suitably aligned. */ 7716 /* Special case VTV comdat sections similar to assemble_variable. */ 7717 if (SECTION_STYLE (block->sect) == SECTION_NAMED 7718 && block->sect->named.name 7719 && (strcmp (block->sect->named.name, ".vtable_map_vars") == 0)) 7720 handle_vtv_comdat_section (block->sect, block->sect->named.decl); 7721 else 7722 switch_to_section (block->sect); 7723 7724 gcc_checking_assert (!(block->sect->common.flags & SECTION_MERGE)); 7725 assemble_align (block->alignment); 7726 7727 /* Define the values of all anchors relative to the current section 7728 position. */ 7729 FOR_EACH_VEC_SAFE_ELT (block->anchors, i, symbol) 7730 targetm.asm_out.output_anchor (symbol); 7731 7732 /* Output the objects themselves. */ 7733 offset = 0; 7734 FOR_EACH_VEC_ELT (*block->objects, i, symbol) 7735 { 7736 /* Move to the object's offset, padding with zeros if necessary. */ 7737 assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset); 7738 offset = SYMBOL_REF_BLOCK_OFFSET (symbol); 7739 if (CONSTANT_POOL_ADDRESS_P (symbol)) 7740 { 7741 desc = SYMBOL_REF_CONSTANT (symbol); 7742 /* Pass 1 for align as we have already laid out everything in the block. 7743 So aligning shouldn't be necessary. 
*/ 7744 output_constant_pool_1 (desc, 1); 7745 offset += GET_MODE_SIZE (desc->mode); 7746 } 7747 else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol)) 7748 { 7749 HOST_WIDE_INT size; 7750 decl = SYMBOL_REF_DECL (symbol); 7751 assemble_constant_contents (DECL_INITIAL (decl), XSTR (symbol, 0), 7752 DECL_ALIGN (decl), false); 7753 7754 size = get_constant_size (DECL_INITIAL (decl)); 7755 offset += size; 7756 if ((flag_sanitize & SANITIZE_ADDRESS) 7757 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST 7758 && asan_protect_global (DECL_INITIAL (decl))) 7759 { 7760 size = asan_red_zone_size (size); 7761 assemble_zeros (size); 7762 offset += size; 7763 } 7764 } 7765 else 7766 { 7767 HOST_WIDE_INT size; 7768 decl = SYMBOL_REF_DECL (symbol); 7769 assemble_variable_contents (decl, XSTR (symbol, 0), false, false); 7770 size = tree_to_uhwi (DECL_SIZE_UNIT (decl)); 7771 offset += size; 7772 if ((flag_sanitize & SANITIZE_ADDRESS) 7773 && asan_protect_global (decl)) 7774 { 7775 size = asan_red_zone_size (size); 7776 assemble_zeros (size); 7777 offset += size; 7778 } 7779 } 7780 } 7781 } 7782 7783 /* A callback for qsort to compare object_blocks. */ 7784 7785 static int 7786 output_object_block_compare (const void *x, const void *y) 7787 { 7788 object_block *p1 = *(object_block * const*)x; 7789 object_block *p2 = *(object_block * const*)y; 7790 7791 if (p1->sect->common.flags & SECTION_NAMED 7792 && !(p2->sect->common.flags & SECTION_NAMED)) 7793 return 1; 7794 7795 if (!(p1->sect->common.flags & SECTION_NAMED) 7796 && p2->sect->common.flags & SECTION_NAMED) 7797 return -1; 7798 7799 if (p1->sect->common.flags & SECTION_NAMED 7800 && p2->sect->common.flags & SECTION_NAMED) 7801 return strcmp (p1->sect->named.name, p2->sect->named.name); 7802 7803 unsigned f1 = p1->sect->common.flags; 7804 unsigned f2 = p2->sect->common.flags; 7805 if (f1 == f2) 7806 return 0; 7807 return f1 < f2 ? -1 : 1; 7808 } 7809 7810 /* Output the definitions of all object_blocks. */ 7811 7812 void 7813 output_object_blocks (void) 7814 { 7815 vec<object_block *, va_heap> v; 7816 v.create (object_block_htab->elements ()); 7817 object_block *obj; 7818 hash_table<object_block_hasher>::iterator hi; 7819 7820 FOR_EACH_HASH_TABLE_ELEMENT (*object_block_htab, obj, object_block *, hi) 7821 v.quick_push (obj); 7822 7823 /* Sort them in order to output them in a deterministic manner, 7824 otherwise we may get .rodata sections in different orders with 7825 and without -g. */ 7826 v.qsort (output_object_block_compare); 7827 unsigned i; 7828 FOR_EACH_VEC_ELT (v, i, obj) 7829 output_object_block (obj); 7830 7831 v.release (); 7832 } 7833 7834 /* This function provides a possible implementation of the 7835 TARGET_ASM_RECORD_GCC_SWITCHES target hook for ELF targets. When triggered 7836 by -frecord-gcc-switches it creates a new mergeable string section in the 7837 assembler output file called TARGET_ASM_RECORD_GCC_SWITCHES_SECTION which 7838 contains the switches in ASCII format. 7839 7840 FIXME: This code does not correctly handle double quote characters 7841 that appear inside strings (it strips them rather than preserving them). 7842 FIXME: ASM_OUTPUT_ASCII, as defined in config/elfos.h, will not emit NUL 7843 characters - instead it treats them as sub-string separators. Since 7844 we want to emit NUL string terminators into the object file we have to use 7845 ASM_OUTPUT_SKIP.
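As a rough illustration (the exact directives depend on the target macros), a recorded switch such as -O2 appears in the section as the string emitted by ASM_OUTPUT_ASCII followed by a one-byte ASM_OUTPUT_SKIP that supplies the terminating NUL.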
*/ 7846 7847 int 7848 elf_record_gcc_switches (print_switch_type type, const char * name) 7849 { 7850 switch (type) 7851 { 7852 case SWITCH_TYPE_PASSED: 7853 ASM_OUTPUT_ASCII (asm_out_file, name, strlen (name)); 7854 ASM_OUTPUT_SKIP (asm_out_file, HOST_WIDE_INT_1U); 7855 break; 7856 7857 case SWITCH_TYPE_DESCRIPTIVE: 7858 if (name == NULL) 7859 { 7860 /* Distinguish between invocations where name is NULL. */ 7861 static bool started = false; 7862 7863 if (!started) 7864 { 7865 section * sec; 7866 7867 sec = get_section (targetm.asm_out.record_gcc_switches_section, 7868 SECTION_DEBUG 7869 | SECTION_MERGE 7870 | SECTION_STRINGS 7871 | (SECTION_ENTSIZE & 1), 7872 NULL); 7873 switch_to_section (sec); 7874 started = true; 7875 } 7876 } 7877 7878 default: 7879 break; 7880 } 7881 7882 /* The return value is currently ignored by the caller, but must be 0. 7883 For -fverbose-asm the return value would be the number of characters 7884 emitted into the assembler file. */ 7885 return 0; 7886 } 7887 7888 /* Emit text to declare externally defined symbols. It is needed to 7889 properly support non-default visibility. */ 7890 void 7891 default_elf_asm_output_external (FILE *file ATTRIBUTE_UNUSED, 7892 tree decl, 7893 const char *name ATTRIBUTE_UNUSED) 7894 { 7895 /* We output the name if and only if TREE_SYMBOL_REFERENCED is 7896 set in order to avoid putting out names that are never really 7897 used. Always output visibility specified in the source. */ 7898 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)) 7899 && (DECL_VISIBILITY_SPECIFIED (decl) 7900 || targetm.binds_local_p (decl))) 7901 maybe_assemble_visibility (decl); 7902 } 7903 7904 /* The default hook for TARGET_ASM_OUTPUT_SOURCE_FILENAME. */ 7905 7906 void 7907 default_asm_output_source_filename (FILE *file, const char *name) 7908 { 7909 #ifdef ASM_OUTPUT_SOURCE_FILENAME 7910 ASM_OUTPUT_SOURCE_FILENAME (file, name); 7911 #else 7912 fprintf (file, "\t.file\t"); 7913 output_quoted_string (file, name); 7914 putc ('\n', file); 7915 #endif 7916 } 7917 7918 /* Output a file name in the form wanted by System V. */ 7919 7920 void 7921 output_file_directive (FILE *asm_file, const char *input_name) 7922 { 7923 int len; 7924 const char *na; 7925 7926 if (input_name == NULL) 7927 input_name = "<stdin>"; 7928 else 7929 input_name = remap_debug_filename (input_name); 7930 7931 len = strlen (input_name); 7932 na = input_name + len; 7933 7934 /* NA gets INPUT_NAME sans directory names. */ 7935 while (na > input_name) 7936 { 7937 if (IS_DIR_SEPARATOR (na[-1])) 7938 break; 7939 na--; 7940 } 7941 7942 targetm.asm_out.output_source_filename (asm_file, na); 7943 } 7944 7945 /* Create a DEBUG_EXPR_DECL / DEBUG_EXPR pair from RTL expression 7946 EXP. 
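The DEBUG_EXPR is given EXP's machine mode; its type is taken from the REG_EXPR or MEM_EXPR attached to EXP when one exists and has that same mode; otherwise it comes from the language hook's type for the mode.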
*/ 7947 rtx 7948 make_debug_expr_from_rtl (const_rtx exp) 7949 { 7950 tree ddecl = make_node (DEBUG_EXPR_DECL), type; 7951 machine_mode mode = GET_MODE (exp); 7952 rtx dval; 7953 7954 DECL_ARTIFICIAL (ddecl) = 1; 7955 if (REG_P (exp) && REG_EXPR (exp)) 7956 type = TREE_TYPE (REG_EXPR (exp)); 7957 else if (MEM_P (exp) && MEM_EXPR (exp)) 7958 type = TREE_TYPE (MEM_EXPR (exp)); 7959 else 7960 type = NULL_TREE; 7961 if (type && TYPE_MODE (type) == mode) 7962 TREE_TYPE (ddecl) = type; 7963 else 7964 TREE_TYPE (ddecl) = lang_hooks.types.type_for_mode (mode, 1); 7965 SET_DECL_MODE (ddecl, mode); 7966 dval = gen_rtx_DEBUG_EXPR (mode); 7967 DEBUG_EXPR_TREE_DECL (dval) = ddecl; 7968 SET_DECL_RTL (ddecl, dval); 7969 return dval; 7970 } 7971 7972 #ifdef ELF_ASCII_ESCAPES 7973 /* Default ASM_OUTPUT_LIMITED_STRING for ELF targets. */ 7974 7975 void 7976 default_elf_asm_output_limited_string (FILE *f, const char *s) 7977 { 7978 int escape; 7979 unsigned char c; 7980 7981 fputs (STRING_ASM_OP, f); 7982 putc ('"', f); 7983 while (*s != '\0') 7984 { 7985 c = *s; 7986 escape = ELF_ASCII_ESCAPES[c]; 7987 switch (escape) 7988 { 7989 case 0: 7990 putc (c, f); 7991 break; 7992 case 1: 7993 putc ('\\', f); 7994 putc ('0'+((c>>6)&7), f); 7995 putc ('0'+((c>>3)&7), f); 7996 putc ('0'+(c&7), f); 7997 break; 7998 default: 7999 putc ('\\', f); 8000 putc (escape, f); 8001 break; 8002 } 8003 s++; 8004 } 8005 putc ('\"', f); 8006 putc ('\n', f); 8007 } 8008 8009 /* Default ASM_OUTPUT_ASCII for ELF targets. */ 8010 8011 void 8012 default_elf_asm_output_ascii (FILE *f, const char *s, unsigned int len) 8013 { 8014 const char *limit = s + len; 8015 const char *last_null = NULL; 8016 unsigned bytes_in_chunk = 0; 8017 unsigned char c; 8018 int escape; 8019 8020 for (; s < limit; s++) 8021 { 8022 const char *p; 8023 8024 if (bytes_in_chunk >= 60) 8025 { 8026 putc ('\"', f); 8027 putc ('\n', f); 8028 bytes_in_chunk = 0; 8029 } 8030 8031 if (s > last_null) 8032 { 8033 for (p = s; p < limit && *p != '\0'; p++) 8034 continue; 8035 last_null = p; 8036 } 8037 else 8038 p = last_null; 8039 8040 if (p < limit && (p - s) <= (long) ELF_STRING_LIMIT) 8041 { 8042 if (bytes_in_chunk > 0) 8043 { 8044 putc ('\"', f); 8045 putc ('\n', f); 8046 bytes_in_chunk = 0; 8047 } 8048 8049 default_elf_asm_output_limited_string (f, s); 8050 s = p; 8051 } 8052 else 8053 { 8054 if (bytes_in_chunk == 0) 8055 fputs (ASCII_DATA_ASM_OP "\"", f); 8056 8057 c = *s; 8058 escape = ELF_ASCII_ESCAPES[c]; 8059 switch (escape) 8060 { 8061 case 0: 8062 putc (c, f); 8063 bytes_in_chunk++; 8064 break; 8065 case 1: 8066 putc ('\\', f); 8067 putc ('0'+((c>>6)&7), f); 8068 putc ('0'+((c>>3)&7), f); 8069 putc ('0'+(c&7), f); 8070 bytes_in_chunk += 4; 8071 break; 8072 default: 8073 putc ('\\', f); 8074 putc (escape, f); 8075 bytes_in_chunk += 2; 8076 break; 8077 } 8078 8079 } 8080 } 8081 8082 if (bytes_in_chunk > 0) 8083 { 8084 putc ('\"', f); 8085 putc ('\n', f); 8086 } 8087 } 8088 #endif 8089 8090 static GTY(()) section *elf_init_array_section; 8091 static GTY(()) section *elf_fini_array_section; 8092 8093 static section * 8094 get_elf_initfini_array_priority_section (int priority, 8095 bool constructor_p) 8096 { 8097 section *sec; 8098 if (priority != DEFAULT_INIT_PRIORITY) 8099 { 8100 char buf[18]; 8101 sprintf (buf, "%s.%.5u", 8102 constructor_p ? 
".init_array" : ".fini_array", 8103 priority); 8104 sec = get_section (buf, SECTION_WRITE | SECTION_NOTYPE, NULL_TREE); 8105 } 8106 else 8107 { 8108 if (constructor_p) 8109 { 8110 if (elf_init_array_section == NULL) 8111 elf_init_array_section 8112 = get_section (".init_array", 8113 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE); 8114 sec = elf_init_array_section; 8115 } 8116 else 8117 { 8118 if (elf_fini_array_section == NULL) 8119 elf_fini_array_section 8120 = get_section (".fini_array", 8121 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE); 8122 sec = elf_fini_array_section; 8123 } 8124 } 8125 return sec; 8126 } 8127 8128 /* Use .init_array section for constructors. */ 8129 8130 void 8131 default_elf_init_array_asm_out_constructor (rtx symbol, int priority) 8132 { 8133 section *sec = get_elf_initfini_array_priority_section (priority, 8134 true); 8135 assemble_addr_to_section (symbol, sec); 8136 } 8137 8138 /* Use .fini_array section for destructors. */ 8139 8140 void 8141 default_elf_fini_array_asm_out_destructor (rtx symbol, int priority) 8142 { 8143 section *sec = get_elf_initfini_array_priority_section (priority, 8144 false); 8145 assemble_addr_to_section (symbol, sec); 8146 } 8147 8148 /* Default TARGET_ASM_OUTPUT_IDENT hook. 8149 8150 This is a bit of a cheat. The real default is a no-op, but this 8151 hook is the default for all targets with a .ident directive. */ 8152 8153 void 8154 default_asm_output_ident_directive (const char *ident_str) 8155 { 8156 const char *ident_asm_op = "\t.ident\t"; 8157 8158 /* If we are still in the front end, do not write out the string 8159 to asm_out_file. Instead, add a fake top-level asm statement. 8160 This allows the front ends to use this hook without actually 8161 writing to asm_out_file, to handle #ident or Pragma Ident. */ 8162 if (symtab->state == PARSING) 8163 { 8164 char *buf = ACONCAT ((ident_asm_op, "\"", ident_str, "\"\n", NULL)); 8165 symtab->finalize_toplevel_asm (build_string (strlen (buf), buf)); 8166 } 8167 else 8168 fprintf (asm_out_file, "%s\"%s\"\n", ident_asm_op, ident_str); 8169 } 8170 8171 8172 /* This function ensures that vtable_map variables are not only 8173 in the comdat section, but that each variable has its own unique 8174 comdat name. Without this the variables end up in the same section 8175 with a single comdat name. 8176 8177 FIXME: resolve_unique_section needs to deal better with 8178 decls with both DECL_SECTION_NAME and DECL_ONE_ONLY. Once 8179 that is fixed, this if-else statement can be replaced with 8180 a single call to "switch_to_section (sect)". */ 8181 8182 static void 8183 handle_vtv_comdat_section (section *sect, const_tree decl ATTRIBUTE_UNUSED) 8184 { 8185 #if defined (OBJECT_FORMAT_ELF) 8186 targetm.asm_out.named_section (sect->named.name, 8187 sect->named.common.flags 8188 | SECTION_LINKONCE, 8189 DECL_NAME (decl)); 8190 in_section = sect; 8191 #else 8192 /* Neither OBJECT_FORMAT_PE nor OBJECT_FORMAT_COFF is set here. 8193 Therefore the following check is used. 8194 In case the target is PE or COFF, a comdat group section 8195 is created, e.g. .vtable_map_vars$foo. The linker places 8196 everything in .vtable_map_vars at the end. 8197 8198 A fix could be made in 8199 gcc/config/i386/winnt.c: i386_pe_unique_section.
*/ 8200 if (TARGET_PECOFF) 8201 { 8202 char *name; 8203 8204 if (TREE_CODE (DECL_NAME (decl)) == IDENTIFIER_NODE) 8205 name = ACONCAT ((sect->named.name, "$", 8206 IDENTIFIER_POINTER (DECL_NAME (decl)), NULL)); 8207 else 8208 name = ACONCAT ((sect->named.name, "$", 8209 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (DECL_NAME (decl))), 8210 NULL)); 8211 8212 targetm.asm_out.named_section (name, 8213 sect->named.common.flags 8214 | SECTION_LINKONCE, 8215 DECL_NAME (decl)); 8216 in_section = sect; 8217 } 8218 else 8219 switch_to_section (sect); 8220 #endif 8221 } 8222 8223 #include "gt-varasm.h" 8224