1 /* Output variables, constants and external declarations, for GNU compiler.
2 Copyright (C) 1987-2022 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* This file handles generation of all the assembler code
22 *except* the instructions of a function.
23 This includes declarations of variables and their initial values.
24
25 We also output the assembler code for constants stored in memory
26 and are responsible for combining constants with the same value. */
27
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "backend.h"
32 #include "target.h"
33 #include "rtl.h"
34 #include "tree.h"
35 #include "predict.h"
36 #include "memmodel.h"
37 #include "tm_p.h"
38 #include "stringpool.h"
39 #include "regs.h"
40 #include "emit-rtl.h"
41 #include "cgraph.h"
42 #include "diagnostic-core.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "varasm.h"
46 #include "version.h"
47 #include "flags.h"
48 #include "stmt.h"
49 #include "expr.h"
50 #include "expmed.h"
51 #include "optabs.h"
52 #include "output.h"
53 #include "langhooks.h"
54 #include "debug.h"
55 #include "common/common-target.h"
56 #include "stringpool.h"
57 #include "attribs.h"
58 #include "asan.h"
59 #include "rtl-iter.h"
60 #include "file-prefix-map.h" /* remap_debug_filename() */
61 #include "alloc-pool.h"
62 #include "toplev.h"
63 #include "opts.h"
64
65 #ifdef XCOFF_DEBUGGING_INFO
66 #include "xcoffout.h" /* Needed for external data declarations. */
67 #endif
68
69 /* The (assembler) name of the first globally-visible object output. */
70 extern GTY(()) const char *first_global_object_name;
71 extern GTY(()) const char *weak_global_object_name;
72
73 const char *first_global_object_name;
74 const char *weak_global_object_name;
75
76 class addr_const;
77 class constant_descriptor_rtx;
78 struct rtx_constant_pool;
79
80 #define n_deferred_constants (crtl->varasm.deferred_constants)
81
82 /* Number for making the label on the next
83 constant that is stored in memory. */
84
85 static GTY(()) int const_labelno;
86
87 /* Carry information from ASM_DECLARE_OBJECT_NAME
88 to ASM_FINISH_DECLARE_OBJECT. */
89
90 int size_directive_output;
91
92 /* The last decl for which assemble_variable was called,
93 if it did ASM_DECLARE_OBJECT_NAME.
94 If the last call to assemble_variable didn't do that,
95 this holds 0. */
96
97 tree last_assemble_variable_decl;
98
99 /* The following global variable indicates if the first basic block
100 in a function belongs to the cold partition or not. */
101
102 bool first_function_block_is_cold;
103
104 /* Whether we saw any functions with no_split_stack. */
105
106 static bool saw_no_split_stack;
107
108 static const char *strip_reg_name (const char *);
109 static int contains_pointers_p (tree);
110 #ifdef ASM_OUTPUT_EXTERNAL
111 static bool incorporeal_function_p (tree);
112 #endif
113 static void decode_addr_const (tree, class addr_const *);
114 static hashval_t const_hash_1 (const tree);
115 static int compare_constant (const tree, const tree);
116 static void output_constant_def_contents (rtx);
117 static void output_addressed_constants (tree, int);
118 static unsigned HOST_WIDE_INT output_constant (tree, unsigned HOST_WIDE_INT,
119 unsigned int, bool, bool);
120 static void globalize_decl (tree);
121 static bool decl_readonly_section_1 (enum section_category);
122 #ifdef BSS_SECTION_ASM_OP
123 #ifdef ASM_OUTPUT_ALIGNED_BSS
124 static void asm_output_aligned_bss (FILE *, tree, const char *,
125 unsigned HOST_WIDE_INT, int)
126 ATTRIBUTE_UNUSED;
127 #endif
128 #endif /* BSS_SECTION_ASM_OP */
129 static void mark_weak (tree);
130 static void output_constant_pool (const char *, tree);
131 static void handle_vtv_comdat_section (section *, const_tree);
132
133 /* Well-known sections, each one associated with some sort of *_ASM_OP. */
134 section *text_section;
135 section *data_section;
136 section *readonly_data_section;
137 section *sdata_section;
138 section *ctors_section;
139 section *dtors_section;
140 section *bss_section;
141 section *sbss_section;
142
143 /* Various forms of common section. All are guaranteed to be nonnull. */
144 section *tls_comm_section;
145 section *comm_section;
146 section *lcomm_section;
147
148 /* A SECTION_NOSWITCH section used for declaring global BSS variables.
149 May be null. */
150 section *bss_noswitch_section;
151
152 /* The section that holds the main exception table, when known. The section
153 is set either by the target's init_sections hook or by the first call to
154 switch_to_exception_section. */
155 section *exception_section;
156
157 /* The section that holds the DWARF2 frame unwind information, when known.
158 The section is set either by the target's init_sections hook or by the
159 first call to switch_to_eh_frame_section. */
160 section *eh_frame_section;
161
162 /* asm_out_file's current section. This is NULL if no section has yet
163 been selected or if we lose track of what the current section is. */
164 section *in_section;
165
166 /* True if code for the current function is currently being directed
167 at the cold section. */
168 bool in_cold_section_p;
169
170 /* The following global holds the "function name" for the code in the
171 cold section of a function, if hot/cold function splitting is enabled
172 and there was actually code that went into the cold section. A
173 pseudo function name is needed for the cold section of code for some
174 debugging tools that perform symbolization. */
175 tree cold_function_name = NULL_TREE;
176
177 /* A linked list of all the unnamed sections. */
178 static GTY(()) section *unnamed_sections;
179
180 /* Return a nonzero value if DECL has a section attribute. */
181 #define IN_NAMED_SECTION(DECL) \
182 (VAR_OR_FUNCTION_DECL_P (DECL) && DECL_SECTION_NAME (DECL) != NULL)
183
184 struct section_hasher : ggc_ptr_hash<section>
185 {
186 typedef const char *compare_type;
187
188 static hashval_t hash (section *);
189 static bool equal (section *, const char *);
190 };
191
192 /* Hash table of named sections. */
193 static GTY(()) hash_table<section_hasher> *section_htab;
194
195 struct object_block_hasher : ggc_ptr_hash<object_block>
196 {
197 typedef const section *compare_type;
198
199 static hashval_t hash (object_block *);
200 static bool equal (object_block *, const section *);
201 };
202
203 /* A table of object_blocks, indexed by section. */
204 static GTY(()) hash_table<object_block_hasher> *object_block_htab;
205
206 /* The next number to use for internal anchor labels. */
207 static GTY(()) int anchor_labelno;
208
209 /* A pool of constants that can be shared between functions. */
210 static GTY(()) struct rtx_constant_pool *shared_constant_pool;
211
212 /* Helper routines for maintaining section_htab. */
213
214 bool
215 section_hasher::equal (section *old, const char *new_name)
216 {
217 return strcmp (old->named.name, new_name) == 0;
218 }
219
220 hashval_t
221 section_hasher::hash (section *old)
222 {
223 return htab_hash_string (old->named.name);
224 }
225
226 /* Return a hash value for section SECT. */
227
228 static hashval_t
229 hash_section (section *sect)
230 {
231 if (sect->common.flags & SECTION_NAMED)
232 return htab_hash_string (sect->named.name);
233 return sect->common.flags & ~SECTION_DECLARED;
234 }
235
236 /* Helper routines for maintaining object_block_htab. */
237
238 inline bool
239 object_block_hasher::equal (object_block *old, const section *new_section)
240 {
241 return old->sect == new_section;
242 }
243
244 hashval_t
245 object_block_hasher::hash (object_block *old)
246 {
247 return hash_section (old->sect);
248 }
249
250 /* Return a new unnamed section with the given fields. */
251
252 section *
253 get_unnamed_section (unsigned int flags, void (*callback) (const char *),
254 const char *data)
255 {
256 section *sect;
257
258 sect = ggc_alloc<section> ();
259 sect->unnamed.common.flags = flags | SECTION_UNNAMED;
260 sect->unnamed.callback = callback;
261 sect->unnamed.data = data;
262 sect->unnamed.next = unnamed_sections;
263
264 unnamed_sections = sect;
265 return sect;
266 }
267
268 /* Return a SECTION_NOSWITCH section with the given fields. */
269
270 static section *
271 get_noswitch_section (unsigned int flags, noswitch_section_callback callback)
272 {
273 section *sect;
274
275 sect = ggc_alloc<section> ();
276 sect->noswitch.common.flags = flags | SECTION_NOSWITCH;
277 sect->noswitch.callback = callback;
278
279 return sect;
280 }
281
282 /* Return the named section structure associated with NAME. Create
283 a new section with the given fields if no such structure exists.
284 When NOT_EXISTING is true, fail if the section already exists. Return
285 the existing section if the SECTION_RETAIN bit doesn't match. Set
286 the SECTION_WRITE | SECTION_RELRO bits on the existing section
287 if one of the section flags is SECTION_WRITE | SECTION_RELRO and the
288 other has none of these flags in named sections and either the section
289 hasn't been declared yet or has been declared as writable. */
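/* For example (an illustrative sketch, not taken from the sources): if
   ".data.rel.ro.local" was first entered here as read-only and is later
   requested with SECTION_WRITE | SECTION_RELRO (writable only because of
   relocations), the existing entry is upgraded to
   SECTION_WRITE | SECTION_RELRO rather than reporting a section type
   conflict, as long as it has not already been declared read-only.  */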
290
291 section *
292 get_section (const char *name, unsigned int flags, tree decl,
293 bool not_existing)
294 {
295 section *sect, **slot;
296
297 slot = section_htab->find_slot_with_hash (name, htab_hash_string (name),
298 INSERT);
299 flags |= SECTION_NAMED;
300 if (decl != nullptr
301 && DECL_P (decl)
302 && lookup_attribute ("retain", DECL_ATTRIBUTES (decl)))
303 flags |= SECTION_RETAIN;
304 if (*slot == NULL)
305 {
306 sect = ggc_alloc<section> ();
307 sect->named.common.flags = flags;
308 sect->named.name = ggc_strdup (name);
309 sect->named.decl = decl;
310 *slot = sect;
311 }
312 else
313 {
314 if (not_existing)
315 internal_error ("section already exists: %qs", name);
316
317 sect = *slot;
318 /* It is fine if one of the sections has SECTION_NOTYPE as long as
319 the other has none of the contrary flags (see the logic at the end
320 of default_section_type_flags, below). */
321 if (((sect->common.flags ^ flags) & SECTION_NOTYPE)
322 && !((sect->common.flags | flags)
323 & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE
324 | (HAVE_COMDAT_GROUP ? SECTION_LINKONCE : 0))))
325 {
326 sect->common.flags |= SECTION_NOTYPE;
327 flags |= SECTION_NOTYPE;
328 }
329 if ((sect->common.flags & ~SECTION_DECLARED) != flags
330 && ((sect->common.flags | flags) & SECTION_OVERRIDE) == 0)
331 {
332 /* It is fine if one of the section flags is
333 SECTION_WRITE | SECTION_RELRO and the other has none of these
334 flags (i.e. read-only) in named sections and either the
335 section hasn't been declared yet or has been declared as writable.
336 In that case just make sure the resulting flags are
337 SECTION_WRITE | SECTION_RELRO, ie. writable only because of
338 relocations. */
339 if (((sect->common.flags ^ flags) & (SECTION_WRITE | SECTION_RELRO))
340 == (SECTION_WRITE | SECTION_RELRO)
341 && (sect->common.flags
342 & ~(SECTION_DECLARED | SECTION_WRITE | SECTION_RELRO))
343 == (flags & ~(SECTION_WRITE | SECTION_RELRO))
344 && ((sect->common.flags & SECTION_DECLARED) == 0
345 || (sect->common.flags & SECTION_WRITE)))
346 {
347 sect->common.flags |= (SECTION_WRITE | SECTION_RELRO);
348 return sect;
349 }
350 /* If the SECTION_RETAIN bit doesn't match, return and switch
351 to a new section later. */
352 if ((sect->common.flags & SECTION_RETAIN)
353 != (flags & SECTION_RETAIN))
354 return sect;
355 /* Sanity check user variables for flag changes. */
356 if (sect->named.decl != NULL
357 && DECL_P (sect->named.decl)
358 && decl != sect->named.decl)
359 {
360 if (decl != NULL && DECL_P (decl))
361 error ("%+qD causes a section type conflict with %qD",
362 decl, sect->named.decl);
363 else
364 error ("section type conflict with %qD", sect->named.decl);
365 inform (DECL_SOURCE_LOCATION (sect->named.decl),
366 "%qD was declared here", sect->named.decl);
367 }
368 else if (decl != NULL && DECL_P (decl))
369 error ("%+qD causes a section type conflict", decl);
370 else
371 error ("section type conflict");
372 /* Make sure we don't error about one section multiple times. */
373 sect->common.flags |= SECTION_OVERRIDE;
374 }
375 }
376 return sect;
377 }
378
379 /* Return true if the current compilation mode benefits from having
380 objects grouped into blocks. */
381
382 static bool
383 use_object_blocks_p (void)
384 {
385 return flag_section_anchors;
386 }
387
388 /* Return the object_block structure for section SECT. Create a new
389 structure if we haven't created one already. Return null if SECT
390 itself is null. Also return null for mergeable sections, since
391 section anchors can't be used in mergeable sections anyway,
392 because the linker might move objects around, and using the
393 object blocks infrastructure in that case is both a waste and a
394 maintenance burden. */
395
396 static struct object_block *
397 get_block_for_section (section *sect)
398 {
399 struct object_block *block;
400
401 if (sect == NULL)
402 return NULL;
403
404 if (sect->common.flags & SECTION_MERGE)
405 return NULL;
406
407 object_block **slot
408 = object_block_htab->find_slot_with_hash (sect, hash_section (sect),
409 INSERT);
410 block = *slot;
411 if (block == NULL)
412 {
413 block = ggc_cleared_alloc<object_block> ();
414 block->sect = sect;
415 *slot = block;
416 }
417 return block;
418 }
419
420 /* Create a symbol with label LABEL and place it at byte offset
421 OFFSET in BLOCK. OFFSET can be negative if the symbol's offset
422 is not yet known. LABEL must be a garbage-collected string. */
423
424 static rtx
425 create_block_symbol (const char *label, struct object_block *block,
426 HOST_WIDE_INT offset)
427 {
428 rtx symbol;
429 unsigned int size;
430
431 /* Create the extended SYMBOL_REF. */
432 size = RTX_HDR_SIZE + sizeof (struct block_symbol);
433 symbol = (rtx) ggc_internal_alloc (size);
434
435 /* Initialize the normal SYMBOL_REF fields. */
436 memset (symbol, 0, size);
437 PUT_CODE (symbol, SYMBOL_REF);
438 PUT_MODE (symbol, Pmode);
439 XSTR (symbol, 0) = label;
440 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_HAS_BLOCK_INFO;
441
442 /* Initialize the block_symbol stuff. */
443 SYMBOL_REF_BLOCK (symbol) = block;
444 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
445
446 return symbol;
447 }
448
449 /* Return a section with a particular name and with whatever SECTION_*
450 flags section_type_flags deems appropriate. The name of the section
451 is taken from NAME if nonnull, otherwise it is taken from DECL's
452 DECL_SECTION_NAME. DECL is the decl associated with the section
453 (see the section comment for details) and RELOC is as for
454 section_type_flags. */
455
456 section *
457 get_named_section (tree decl, const char *name, int reloc)
458 {
459 unsigned int flags;
460
461 if (name == NULL)
462 {
463 gcc_assert (decl && DECL_P (decl) && DECL_SECTION_NAME (decl));
464 name = DECL_SECTION_NAME (decl);
465 }
466
467 flags = targetm.section_type_flags (decl, name, reloc);
468 return get_section (name, flags, decl);
469 }
470
471 /* Worker for resolve_unique_section. */
472
473 static bool
474 set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED)
475 {
476 n->implicit_section = true;
477 return false;
478 }
479
480 /* If required, set DECL_SECTION_NAME to a unique name. */
481
482 void
483 resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED,
484 int flag_function_or_data_sections)
485 {
486 if (DECL_SECTION_NAME (decl) == NULL
487 && targetm_common.have_named_sections
488 && (flag_function_or_data_sections
489 || lookup_attribute ("retain", DECL_ATTRIBUTES (decl))
490 || DECL_COMDAT_GROUP (decl)))
491 {
492 targetm.asm_out.unique_section (decl, reloc);
493 if (DECL_SECTION_NAME (decl))
494 symtab_node::get (decl)->call_for_symbol_and_aliases
495 (set_implicit_section, NULL, true);
496 }
497 }
498
499 #ifdef BSS_SECTION_ASM_OP
500
501 #ifdef ASM_OUTPUT_ALIGNED_BSS
502
503 /* Utility function for targets to use in implementing
504 ASM_OUTPUT_ALIGNED_BSS.
505 ??? It is believed that this function will work in most cases so such
506 support is localized here. */
507
508 static void
509 asm_output_aligned_bss (FILE *file, tree decl ATTRIBUTE_UNUSED,
510 const char *name, unsigned HOST_WIDE_INT size,
511 int align)
512 {
513 switch_to_section (bss_section);
514 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
515 #ifdef ASM_DECLARE_OBJECT_NAME
516 last_assemble_variable_decl = decl;
517 ASM_DECLARE_OBJECT_NAME (file, name, decl);
518 #else
519 /* Standard thing is just output label for the object. */
520 ASM_OUTPUT_LABEL (file, name);
521 #endif /* ASM_DECLARE_OBJECT_NAME */
522 ASM_OUTPUT_SKIP (file, size ? size : 1);
523 }
524
525 #endif
526
527 #endif /* BSS_SECTION_ASM_OP */
528
529 #ifndef USE_SELECT_SECTION_FOR_FUNCTIONS
530 /* Return the hot section for function DECL. Return text_section for
531 null DECLs. */
532
533 static section *
534 hot_function_section (tree decl)
535 {
536 if (decl != NULL_TREE
537 && DECL_SECTION_NAME (decl) != NULL
538 && targetm_common.have_named_sections)
539 return get_named_section (decl, NULL, 0);
540 else
541 return text_section;
542 }
543 #endif
544
545 /* Return the section named TEXT_SECTION_NAME if DECL is NULL or if
546 DECL_SECTION_NAME (DECL) is NULL.
547
548 When DECL_SECTION_NAME is non-NULL and NAMED_SECTION_SUFFIX is
549 non-NULL, produce a section named by concatenating the (stripped)
550 DECL_SECTION_NAME with NAMED_SECTION_SUFFIX.  Otherwise, for an
551 implicit section, produce "TEXT_SECTION_NAME.IMPLICIT_NAME". */
552
553 section *
554 get_named_text_section (tree decl,
555 const char *text_section_name,
556 const char *named_section_suffix)
557 {
558 if (decl && DECL_SECTION_NAME (decl))
559 {
560 if (named_section_suffix)
561 {
562 const char *dsn = DECL_SECTION_NAME (decl);
563 const char *stripped_name;
564 char *name, *buffer;
565
566 name = (char *) alloca (strlen (dsn) + 1);
567 memcpy (name, dsn,
568 strlen (dsn) + 1);
569
570 stripped_name = targetm.strip_name_encoding (name);
571
572 buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
573 return get_named_section (decl, buffer, 0);
574 }
575 else if (symtab_node::get (decl)->implicit_section)
576 {
577 const char *name;
578
579 /* Do not try to split gnu_linkonce functions. This gets somewhat
580 slippery. */
581 if (DECL_COMDAT_GROUP (decl) && !HAVE_COMDAT_GROUP)
582 return NULL;
583 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
584 name = targetm.strip_name_encoding (name);
585 return get_named_section (decl, ACONCAT ((text_section_name, ".",
586 name, NULL)), 0);
587 }
588 else
589 return NULL;
590 }
591 return get_named_section (decl, text_section_name, 0);
592 }
593
594 /* Choose named function section based on its frequency. */
595
596 section *
597 default_function_section (tree decl, enum node_frequency freq,
598 bool startup, bool exit)
599 {
600 #if defined HAVE_LD_EH_GC_SECTIONS && defined HAVE_LD_EH_GC_SECTIONS_BUG
601 /* Old GNU linkers have buggy --gc-section support, which sometimes
602 results in .gcc_except_table* sections being garbage collected. */
603 if (decl
604 && symtab_node::get (decl)->implicit_section)
605 return NULL;
606 #endif
607
608 if (!flag_reorder_functions
609 || !targetm_common.have_named_sections)
610 return NULL;
611 /* Startup code should go to the startup subsection unless it is
612 unlikely executed (this happens especially with function splitting,
613 where we can split away unnecessary parts of static constructors). */
614 if (startup && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
615 {
616 /* During LTO the tp_first_run profiling will naturally place all
617 initialization code first. Using a separate section is counter-productive
618 because startup-only code may call functions which are no longer
619 startup-only. */
620 if (!in_lto_p
621 || !cgraph_node::get (decl)->tp_first_run
622 || !opt_for_fn (decl, flag_profile_reorder_functions))
623 return get_named_text_section (decl, ".text.startup", NULL);
624 else
625 return NULL;
626 }
627
628 /* Similarly for exit. */
629 if (exit && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
630 return get_named_text_section (decl, ".text.exit", NULL);
631
632 /* Group cold functions together, similarly for hot code. */
633 switch (freq)
634 {
635 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
636 return get_named_text_section (decl, ".text.unlikely", NULL);
637 case NODE_FREQUENCY_HOT:
638 return get_named_text_section (decl, ".text.hot", NULL);
639 /* FALLTHRU */
640 default:
641 return NULL;
642 }
643 }
644
645 /* Return the section for function DECL.
646
647 If DECL is NULL_TREE, return the text section. We can be passed
648 NULL_TREE under some circumstances by dbxout.cc at least.
649
650 If FORCE_COLD is true, return cold function section ignoring
651 the frequency info of cgraph_node. */
652
653 static section *
654 function_section_1 (tree decl, bool force_cold)
655 {
656 section *section = NULL;
657 enum node_frequency freq = NODE_FREQUENCY_NORMAL;
658 bool startup = false, exit = false;
659
660 if (decl)
661 {
662 struct cgraph_node *node = cgraph_node::get (decl);
663
664 if (node)
665 {
666 freq = node->frequency;
667 startup = node->only_called_at_startup;
668 exit = node->only_called_at_exit;
669 }
670 }
671 if (force_cold)
672 freq = NODE_FREQUENCY_UNLIKELY_EXECUTED;
673
674 #ifdef USE_SELECT_SECTION_FOR_FUNCTIONS
675 if (decl != NULL_TREE
676 && DECL_SECTION_NAME (decl) != NULL)
677 {
678 if (targetm.asm_out.function_section)
679 section = targetm.asm_out.function_section (decl, freq,
680 startup, exit);
681 if (section)
682 return section;
683 return get_named_section (decl, NULL, 0);
684 }
685 else
686 return targetm.asm_out.select_section
687 (decl, freq == NODE_FREQUENCY_UNLIKELY_EXECUTED,
688 symtab_node::get (decl)->definition_alignment ());
689 #else
690 if (targetm.asm_out.function_section)
691 section = targetm.asm_out.function_section (decl, freq, startup, exit);
692 if (section)
693 return section;
694 return hot_function_section (decl);
695 #endif
696 }
697
698 /* Return the section for function DECL.
699
700 If DECL is NULL_TREE, return the text section. We can be passed
701 NULL_TREE under some circumstances by dbxout.cc at least. */
702
703 section *
704 function_section (tree decl)
705 {
706 /* Handle cases where function splitting code decides
707 to put function entry point into unlikely executed section
708 despite the fact that the function itself is not cold
709 (i.e. it is called rarely but contains a hot loop that is
710 better to live in hot subsection for the code locality). */
711 return function_section_1 (decl,
712 first_function_block_is_cold);
713 }
714
715 /* Return the section for the current function, taking IN_COLD_SECTION_P
716 into account. */
717
718 section *
719 current_function_section (void)
720 {
721 return function_section_1 (current_function_decl, in_cold_section_p);
722 }
723
724 /* Tell assembler to switch to unlikely-to-be-executed text section. */
725
726 section *
727 unlikely_text_section (void)
728 {
729 return function_section_1 (current_function_decl, true);
730 }
731
732 /* When called within a function context, return true if the function
733 has been assigned a cold text section and if SECT is that section.
734 When called outside a function context, return true if SECT is the
735 default cold section. */
736
737 bool
738 unlikely_text_section_p (section *sect)
739 {
740 return sect == function_section_1 (current_function_decl, true);
741 }
742
743 /* Switch to the other function partition (if currently in the hot section,
744 switch into the cold section, and otherwise into the hot section). */
745
746 void
747 switch_to_other_text_partition (void)
748 {
749 in_cold_section_p = !in_cold_section_p;
750 switch_to_section (current_function_section ());
751 }
752
753 /* Return the read-only or relocated read-only data section
754 associated with function DECL. */
755
756 section *
757 default_function_rodata_section (tree decl, bool relocatable)
758 {
759 const char* sname;
760 unsigned int flags;
761
762 flags = 0;
763
764 if (relocatable)
765 {
766 sname = ".data.rel.ro.local";
767 flags = (SECTION_WRITE | SECTION_RELRO);
768 }
769 else
770 sname = ".rodata";
771
772 if (decl && DECL_SECTION_NAME (decl))
773 {
774 const char *name = DECL_SECTION_NAME (decl);
775
776 if (DECL_COMDAT_GROUP (decl) && HAVE_COMDAT_GROUP)
777 {
778 const char *dot;
779 size_t len;
780 char* rname;
781
782 dot = strchr (name + 1, '.');
783 if (!dot)
784 dot = name;
785 len = strlen (dot) + strlen (sname) + 1;
786 rname = (char *) alloca (len);
787
788 strcpy (rname, sname);
789 strcat (rname, dot);
790 return get_section (rname, (SECTION_LINKONCE | flags), decl);
791 }
792 /* For .gnu.linkonce.t.foo we want to use .gnu.linkonce.r.foo or
793 .gnu.linkonce.d.rel.ro.local.foo if the jump table is relocatable. */
794 else if (DECL_COMDAT_GROUP (decl)
795 && startswith (name, ".gnu.linkonce.t."))
796 {
797 size_t len;
798 char *rname;
799
800 if (relocatable)
801 {
802 len = strlen (name) + strlen (".rel.ro.local") + 1;
803 rname = (char *) alloca (len);
804
805 strcpy (rname, ".gnu.linkonce.d.rel.ro.local");
806 strcat (rname, name + 15);
807 }
808 else
809 {
810 len = strlen (name) + 1;
811 rname = (char *) alloca (len);
812
813 memcpy (rname, name, len);
814 rname[14] = 'r';
815 }
816 return get_section (rname, (SECTION_LINKONCE | flags), decl);
817 }
818 /* For .text.foo we want to use .rodata.foo. */
819 else if (flag_function_sections && flag_data_sections
820 && startswith (name, ".text."))
821 {
822 size_t len = strlen (name) + 1;
823 char *rname = (char *) alloca (len + strlen (sname) - 5);
824
825 memcpy (rname, sname, strlen (sname));
826 memcpy (rname + strlen (sname), name + 5, len - 5);
827 return get_section (rname, flags, decl);
828 }
829 }
830
831 if (relocatable)
832 return get_section (sname, flags, decl);
833 else
834 return readonly_data_section;
835 }
836
837 /* Return the read-only data section associated with function DECL
838 for targets where that section should be always the single
839 readonly data section. */
840
841 section *
842 default_no_function_rodata_section (tree, bool)
843 {
844 return readonly_data_section;
845 }
846
847 /* A subroutine of mergeable_string_section and mergeable_constant_section. */
848
849 static const char *
850 function_mergeable_rodata_prefix (void)
851 {
852 section *s = targetm.asm_out.function_rodata_section (current_function_decl,
853 false);
854 if (SECTION_STYLE (s) == SECTION_NAMED)
855 return s->named.name;
856 else
857 return targetm.asm_out.mergeable_rodata_prefix;
858 }
859
860 /* Return the section to use for string merging. */
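/* For example (a sketch, assuming the default ".rodata" mergeable prefix
   and assembler support for GAS SHF_MERGE): a NUL-terminated narrow string
   constant such as "hello" typically lands in ".rodata.str1.1" -- element
   size 1 byte, alignment 1 byte -- per the name computed below.  */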
861
862 static section *
863 mergeable_string_section (tree decl ATTRIBUTE_UNUSED,
864 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
865 unsigned int flags ATTRIBUTE_UNUSED)
866 {
867 HOST_WIDE_INT len;
868
869 if (HAVE_GAS_SHF_MERGE && flag_merge_constants
870 && TREE_CODE (decl) == STRING_CST
871 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
872 && align <= 256
873 && (len = int_size_in_bytes (TREE_TYPE (decl))) > 0
874 && TREE_STRING_LENGTH (decl) == len)
875 {
876 scalar_int_mode mode;
877 unsigned int modesize;
878 const char *str;
879 HOST_WIDE_INT i;
880 int j, unit;
881 const char *prefix = function_mergeable_rodata_prefix ();
882 char *name = (char *) alloca (strlen (prefix) + 30);
883
884 mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (TREE_TYPE (decl)));
885 modesize = GET_MODE_BITSIZE (mode);
886 if (modesize >= 8 && modesize <= 256
887 && (modesize & (modesize - 1)) == 0)
888 {
889 if (align < modesize)
890 align = modesize;
891
892 if (!HAVE_LD_ALIGNED_SHF_MERGE && align > 8)
893 return readonly_data_section;
894
895 str = TREE_STRING_POINTER (decl);
896 unit = GET_MODE_SIZE (mode);
897
898 /* Check for embedded NUL characters. */
899 for (i = 0; i < len; i += unit)
900 {
901 for (j = 0; j < unit; j++)
902 if (str[i + j] != '\0')
903 break;
904 if (j == unit)
905 break;
906 }
907 if (i == len - unit || (unit == 1 && i == len))
908 {
909 sprintf (name, "%s.str%d.%d", prefix,
910 modesize / 8, (int) (align / 8));
911 flags |= (modesize / 8) | SECTION_MERGE | SECTION_STRINGS;
912 return get_section (name, flags, NULL);
913 }
914 }
915 }
916
917 return readonly_data_section;
918 }
919
920 /* Return the section to use for constant merging. */
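/* For example (a sketch, assuming the default ".rodata" prefix): an 8-byte
   constant requiring 8-byte alignment would be placed in ".rodata.cst8",
   matching the "%s.cst%d" name computed below.  */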
921
922 section *
923 mergeable_constant_section (machine_mode mode ATTRIBUTE_UNUSED,
924 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
925 unsigned int flags ATTRIBUTE_UNUSED)
926 {
927 if (HAVE_GAS_SHF_MERGE && flag_merge_constants
928 && mode != VOIDmode
929 && mode != BLKmode
930 && known_le (GET_MODE_BITSIZE (mode), align)
931 && align >= 8
932 && align <= 256
933 && (align & (align - 1)) == 0
934 && (HAVE_LD_ALIGNED_SHF_MERGE ? 1 : align == 8))
935 {
936 const char *prefix = function_mergeable_rodata_prefix ();
937 char *name = (char *) alloca (strlen (prefix) + 30);
938
939 sprintf (name, "%s.cst%d", prefix, (int) (align / 8));
940 flags |= (align / 8) | SECTION_MERGE;
941 return get_section (name, flags, NULL);
942 }
943 return readonly_data_section;
944 }
945
946 /* Given NAME, a putative register name, discard any customary prefixes. */
947
948 static const char *
949 strip_reg_name (const char *name)
950 {
951 #ifdef REGISTER_PREFIX
952 if (!strncmp (name, REGISTER_PREFIX, strlen (REGISTER_PREFIX)))
953 name += strlen (REGISTER_PREFIX);
954 #endif
955 if (name[0] == '%' || name[0] == '#')
956 name++;
957 return name;
958 }
959
960 /* The user has asked for a DECL to have a particular name. Set (or
961 change) it in such a way that we don't prefix an underscore to
962 it. */
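/* For example (illustrative only): for a declaration such as

     int counter asm ("counter_v2");

   the stored assembler name becomes "*counter_v2"; the leading '*' tells
   the output machinery to emit the name verbatim, without prepending the
   target's user-label prefix.  */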
963 void
964 set_user_assembler_name (tree decl, const char *name)
965 {
966 char *starred = (char *) alloca (strlen (name) + 2);
967 starred[0] = '*';
968 strcpy (starred + 1, name);
969 symtab->change_decl_assembler_name (decl, get_identifier (starred));
970 SET_DECL_RTL (decl, NULL_RTX);
971 }
972
973 /* Decode an `asm' spec for a declaration as a register name.
974 Return the register number, or -1 if nothing specified,
975 or -2 if the ASMSPEC is not `cc' or `memory' and is not recognized,
976 or -3 if ASMSPEC is `cc' and is not recognized,
977 or -4 if ASMSPEC is `memory' and is not recognized.
978 Accept an exact spelling or a decimal number.
979 Prefixes such as % are optional. */
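/* For example (illustrative only): given a global register variable such as

     register int *stack_ptr asm ("sp");

   the string "sp" is what reaches this function as ASMSPEC; a decimal
   spelling such as "7" is also accepted, provided it names a valid hard
   register on the target.  */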
980
981 int
982 decode_reg_name_and_count (const char *asmspec, int *pnregs)
983 {
984 /* Presume just one register is clobbered. */
985 *pnregs = 1;
986
987 if (asmspec != 0)
988 {
989 int i;
990
991 /* Get rid of confusing prefixes. */
992 asmspec = strip_reg_name (asmspec);
993
994 /* Allow a decimal number as a "register name". */
995 for (i = strlen (asmspec) - 1; i >= 0; i--)
996 if (! ISDIGIT (asmspec[i]))
997 break;
998 if (asmspec[0] != 0 && i < 0)
999 {
1000 i = atoi (asmspec);
1001 if (i < FIRST_PSEUDO_REGISTER && i >= 0 && reg_names[i][0])
1002 return i;
1003 else
1004 return -2;
1005 }
1006
1007 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1008 if (reg_names[i][0]
1009 && ! strcmp (asmspec, strip_reg_name (reg_names[i])))
1010 return i;
1011
1012 #ifdef OVERLAPPING_REGISTER_NAMES
1013 {
1014 static const struct
1015 {
1016 const char *const name;
1017 const int number;
1018 const int nregs;
1019 } table[] = OVERLAPPING_REGISTER_NAMES;
1020
1021 for (i = 0; i < (int) ARRAY_SIZE (table); i++)
1022 if (table[i].name[0]
1023 && ! strcmp (asmspec, table[i].name))
1024 {
1025 *pnregs = table[i].nregs;
1026 return table[i].number;
1027 }
1028 }
1029 #endif /* OVERLAPPING_REGISTER_NAMES */
1030
1031 #ifdef ADDITIONAL_REGISTER_NAMES
1032 {
1033 static const struct { const char *const name; const int number; } table[]
1034 = ADDITIONAL_REGISTER_NAMES;
1035
1036 for (i = 0; i < (int) ARRAY_SIZE (table); i++)
1037 if (table[i].name[0]
1038 && ! strcmp (asmspec, table[i].name)
1039 && reg_names[table[i].number][0])
1040 return table[i].number;
1041 }
1042 #endif /* ADDITIONAL_REGISTER_NAMES */
1043
1044 if (!strcmp (asmspec, "memory"))
1045 return -4;
1046
1047 if (!strcmp (asmspec, "cc"))
1048 return -3;
1049
1050 return -2;
1051 }
1052
1053 return -1;
1054 }
1055
1056 int
1057 decode_reg_name (const char *name)
1058 {
1059 int count;
1060 return decode_reg_name_and_count (name, &count);
1061 }
1062
1063
1064 /* Return true if DECL's initializer is suitable for a BSS section. */
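/* For example (illustrative only): "static int counter;" (no initializer)
   is a BSS candidate, as is "static int zeros[16] = { 0 };" when
   -fzero-initialized-in-bss is in effect, whereas "static const int k = 1;"
   belongs in a read-only section instead.  */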
1065
1066 bool
1067 bss_initializer_p (const_tree decl, bool named)
1068 {
1069 /* Do not put non-common constants into the .bss section; they belong in
1070 a readonly section, except when NAMED is true. */
1071 return ((!TREE_READONLY (decl) || DECL_COMMON (decl) || named)
1072 && (DECL_INITIAL (decl) == NULL
1073 /* In LTO we have no errors in program; error_mark_node is used
1074 to mark offlined constructors. */
1075 || (DECL_INITIAL (decl) == error_mark_node
1076 && !in_lto_p)
1077 || (flag_zero_initialized_in_bss
1078 && initializer_zerop (DECL_INITIAL (decl))
1079 /* A decl with the "persistent" attribute applied and
1080 explicitly initialized to 0 should not be treated as a BSS
1081 variable. */
1082 && !DECL_PERSISTENT_P (decl))));
1083 }
1084
1085 /* Compute the alignment of variable specified by DECL.
1086 DONT_OUTPUT_DATA is from assemble_variable. */
1087
1088 void
1089 align_variable (tree decl, bool dont_output_data)
1090 {
1091 unsigned int align = DECL_ALIGN (decl);
1092
1093 /* In the case of initializing an array whose length isn't specified,
1094 where we have not yet been able to do the layout,
1095 figure out the proper alignment now. */
1096 if (dont_output_data && DECL_SIZE (decl) == 0
1097 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1098 align = MAX (align, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1099
1100 /* Some object file formats have a maximum alignment which they support.
1101 In particular, a.out format supports a maximum alignment of 4. */
1102 if (align > MAX_OFILE_ALIGNMENT)
1103 {
1104 error ("alignment of %q+D is greater than maximum object "
1105 "file alignment %d", decl,
1106 MAX_OFILE_ALIGNMENT/BITS_PER_UNIT);
1107 align = MAX_OFILE_ALIGNMENT;
1108 }
1109
1110 if (! DECL_USER_ALIGN (decl))
1111 {
1112 #ifdef DATA_ABI_ALIGNMENT
1113 unsigned int data_abi_align
1114 = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
1115 /* For backwards compatibility, don't assume the ABI alignment for
1116 TLS variables. */
1117 if (! DECL_THREAD_LOCAL_P (decl) || data_abi_align <= BITS_PER_WORD)
1118 align = data_abi_align;
1119 #endif
1120
1121 /* On some machines, it is good to increase alignment sometimes.
1122 But as DECL_ALIGN is used both for actually emitting the variable
1123 and for code accessing the variable as guaranteed alignment, we
1124 can only increase the alignment as a performance optimization
1125 if the references to it must bind to the current definition.
1126 if (decl_binds_to_current_def_p (decl)
1127 && !DECL_VIRTUAL_P (decl))
1128 {
1129 #ifdef DATA_ALIGNMENT
1130 unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
1131 /* Don't increase alignment too much for TLS variables - TLS space
1132 is too precious. */
1133 if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
1134 align = data_align;
1135 #endif
1136 if (DECL_INITIAL (decl) != 0
1137 /* In LTO we have no errors in program; error_mark_node is used
1138 to mark offlined constructors. */
1139 && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
1140 {
1141 unsigned int const_align
1142 = targetm.constant_alignment (DECL_INITIAL (decl), align);
1143 /* Don't increase alignment too much for TLS variables - TLS
1144 space is too precious. */
1145 if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
1146 align = const_align;
1147 }
1148 }
1149 }
1150
1151 /* Reset the alignment in case we have made it tighter, so we can benefit
1152 from it in get_pointer_alignment. */
1153 SET_DECL_ALIGN (decl, align);
1154 }
1155
1156 /* Return DECL_ALIGN (decl), possibly increased for optimization purposes
1157 beyond what align_variable returned. */
1158
1159 static unsigned int
1160 get_variable_align (tree decl)
1161 {
1162 unsigned int align = DECL_ALIGN (decl);
1163
1164 /* For user aligned vars or static vars align_variable already did
1165 everything. */
1166 if (DECL_USER_ALIGN (decl) || !TREE_PUBLIC (decl))
1167 return align;
1168
1169 #ifdef DATA_ABI_ALIGNMENT
1170 if (DECL_THREAD_LOCAL_P (decl))
1171 align = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
1172 #endif
1173
1174 /* For decls that bind to the current definition, align_variable
1175 already did everything, except that it did not assume the ABI-required
1176 alignment of TLS variables. For other vars, increase the alignment here
1177 as an optimization. */
1178 if (!decl_binds_to_current_def_p (decl))
1179 {
1180 /* On some machines, it is good to increase alignment sometimes. */
1181 #ifdef DATA_ALIGNMENT
1182 unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
1183 /* Don't increase alignment too much for TLS variables - TLS space
1184 is too precious. */
1185 if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
1186 align = data_align;
1187 #endif
1188 if (DECL_INITIAL (decl) != 0
1189 /* In LTO we have no errors in program; error_mark_node is used
1190 to mark offlined constructors. */
1191 && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
1192 {
1193 unsigned int const_align
1194 = targetm.constant_alignment (DECL_INITIAL (decl), align);
1195 /* Don't increase alignment too much for TLS variables - TLS space
1196 is too precious. */
1197 if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
1198 align = const_align;
1199 }
1200 }
1201
1202 return align;
1203 }
1204
1205 /* Compute reloc for get_variable_section. The return value
1206 is a mask for which bit 1 indicates a global relocation, and bit 0
1207 indicates a local relocation. */
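/* For example (illustrative only): an initializer that takes the address of
   a locally-bound symbol, as in "static int x; int *p = &x;", yields
   reloc 1 (local relocation only), while "extern int y; int *q = &y;"
   typically yields reloc 2, depending on how the target resolves symbol
   binding.  */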
1208
1209 int
1210 compute_reloc_for_var (tree decl)
1211 {
1212 int reloc;
1213
1214 if (DECL_INITIAL (decl) == error_mark_node)
1215 reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0;
1216 else if (DECL_INITIAL (decl))
1217 reloc = compute_reloc_for_constant (DECL_INITIAL (decl));
1218 else
1219 reloc = 0;
1220
1221 return reloc;
1222 }
1223
1224 /* Return the section into which the given VAR_DECL or CONST_DECL
1225 should be placed. PREFER_NOSWITCH_P is true if a noswitch
1226 section should be used wherever possible. */
1227
1228 section *
1229 get_variable_section (tree decl, bool prefer_noswitch_p)
1230 {
1231 addr_space_t as = ADDR_SPACE_GENERIC;
1232 int reloc;
1233 varpool_node *vnode = varpool_node::get (decl);
1234 if (vnode)
1235 {
1236 vnode = vnode->ultimate_alias_target ();
1237 decl = vnode->decl;
1238 }
1239
1240 if (TREE_TYPE (decl) != error_mark_node)
1241 as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
1242
1243 /* We need the constructor to figure out reloc flag. */
1244 if (vnode)
1245 vnode->get_constructor ();
1246
1247 if (DECL_COMMON (decl)
1248 && !lookup_attribute ("retain", DECL_ATTRIBUTES (decl)))
1249 {
1250 /* If the decl has been given an explicit section name, or it resides
1251 in a non-generic address space, then it isn't common, and shouldn't
1252 be handled as such. */
1253 gcc_assert (DECL_SECTION_NAME (decl) == NULL
1254 && ADDR_SPACE_GENERIC_P (as));
1255 if (DECL_THREAD_LOCAL_P (decl))
1256 return tls_comm_section;
1257 else if (TREE_PUBLIC (decl) && bss_initializer_p (decl))
1258 return comm_section;
1259 }
1260
1261 reloc = compute_reloc_for_var (decl);
1262
1263 resolve_unique_section (decl, reloc, flag_data_sections);
1264 if (IN_NAMED_SECTION (decl))
1265 {
1266 section *sect = get_named_section (decl, NULL, reloc);
1267
1268 if ((sect->common.flags & SECTION_BSS)
1269 && !bss_initializer_p (decl, true))
1270 {
1271 error_at (DECL_SOURCE_LOCATION (decl),
1272 "only zero initializers are allowed in section %qs",
1273 sect->named.name);
1274 DECL_INITIAL (decl) = error_mark_node;
1275 }
1276 return sect;
1277 }
1278
1279 if (ADDR_SPACE_GENERIC_P (as)
1280 && !DECL_THREAD_LOCAL_P (decl)
1281 && !DECL_NOINIT_P (decl)
1282 && !(prefer_noswitch_p && targetm.have_switchable_bss_sections)
1283 && bss_initializer_p (decl))
1284 {
1285 if (!TREE_PUBLIC (decl)
1286 && !((flag_sanitize & SANITIZE_ADDRESS)
1287 && asan_protect_global (decl)))
1288 return lcomm_section;
1289 if (bss_noswitch_section)
1290 return bss_noswitch_section;
1291 }
1292
1293 return targetm.asm_out.select_section (decl, reloc,
1294 get_variable_align (decl));
1295 }
1296
1297 /* Return the block into which object_block DECL should be placed. */
1298
1299 static struct object_block *
1300 get_block_for_decl (tree decl)
1301 {
1302 section *sect;
1303
1304 if (VAR_P (decl))
1305 {
1306 /* The object must be defined in this translation unit. */
1307 if (DECL_EXTERNAL (decl))
1308 return NULL;
1309
1310 /* There's no point using object blocks for something that is
1311 isolated by definition. */
1312 if (DECL_COMDAT_GROUP (decl))
1313 return NULL;
1314 }
1315
1316 /* We can only calculate block offsets if the decl has a known
1317 constant size. */
1318 if (DECL_SIZE_UNIT (decl) == NULL)
1319 return NULL;
1320 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)))
1321 return NULL;
1322
1323 /* Find out which section should contain DECL. We cannot put it into
1324 an object block if it requires a standalone definition. */
1325 if (VAR_P (decl))
1326 align_variable (decl, 0);
1327 sect = get_variable_section (decl, true);
1328 if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
1329 return NULL;
1330
1331 if (bool (lookup_attribute ("retain", DECL_ATTRIBUTES (decl)))
1332 != bool (sect->common.flags & SECTION_RETAIN))
1333 return NULL;
1334
1335 return get_block_for_section (sect);
1336 }
1337
1338 /* Make sure block symbol SYMBOL is in block BLOCK. */
1339
1340 static void
1341 change_symbol_block (rtx symbol, struct object_block *block)
1342 {
1343 if (block != SYMBOL_REF_BLOCK (symbol))
1344 {
1345 gcc_assert (SYMBOL_REF_BLOCK_OFFSET (symbol) < 0);
1346 SYMBOL_REF_BLOCK (symbol) = block;
1347 }
1348 }
1349
1350 /* Return true if it is possible to put DECL in an object_block. */
1351
1352 static bool
1353 use_blocks_for_decl_p (tree decl)
1354 {
1355 struct symtab_node *snode;
1356
1357 /* Don't create object blocks if each DECL is placed into a separate
1358 section because that will uselessly create a section anchor for
1359 each DECL. */
1360 if (flag_data_sections)
1361 return false;
1362
1363 /* Only data DECLs can be placed into object blocks. */
1364 if (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
1365 return false;
1366
1367 /* DECL_INITIAL (decl) set to decl is a hack used for some decls that
1368 are never used from code directly and we never want object block handling
1369 for those. */
1370 if (DECL_INITIAL (decl) == decl)
1371 return false;
1372
1373 /* If this decl is an alias, then we don't want to emit a
1374 definition. */
1375 if (VAR_P (decl)
1376 && (snode = symtab_node::get (decl)) != NULL
1377 && snode->alias)
1378 return false;
1379
1380 return targetm.use_blocks_for_decl_p (decl);
1381 }
1382
1383 /* Follow the IDENTIFIER_TRANSPARENT_ALIAS chain starting at *ALIAS
1384 until we find an identifier that is not itself a transparent alias.
1385 Modify the alias passed to it by reference (and all aliases on the
1386 way to the ultimate target), such that they do not have to be
1387 followed again, and return the ultimate target of the alias
1388 chain. */
1389
1390 static inline tree
1391 ultimate_transparent_alias_target (tree *alias)
1392 {
1393 tree target = *alias;
1394
1395 if (IDENTIFIER_TRANSPARENT_ALIAS (target))
1396 {
1397 gcc_assert (TREE_CHAIN (target));
1398 target = ultimate_transparent_alias_target (&TREE_CHAIN (target));
1399 gcc_assert (! IDENTIFIER_TRANSPARENT_ALIAS (target)
1400 && ! TREE_CHAIN (target));
1401 *alias = target;
1402 }
1403
1404 return target;
1405 }
1406
1407 /* Return true if REGNUM is mentioned in ELIMINABLE_REGS as a from
1408 register number. */
1409
1410 static bool
1411 eliminable_regno_p (int regnum)
1412 {
1413 static const struct
1414 {
1415 const int from;
1416 const int to;
1417 } eliminables[] = ELIMINABLE_REGS;
1418 for (size_t i = 0; i < ARRAY_SIZE (eliminables); i++)
1419 if (regnum == eliminables[i].from)
1420 return true;
1421 return false;
1422 }
1423
1424 /* Create the DECL_RTL for a VAR_DECL or FUNCTION_DECL. DECL should
1425 have static storage duration. In other words, it should not be an
1426 automatic variable, including PARM_DECLs.
1427
1428 There is, however, one exception: this function handles variables
1429 explicitly placed in a particular register by the user.
1430
1431 This is never called for PARM_DECL nodes. */
1432
1433 void
1434 make_decl_rtl (tree decl)
1435 {
1436 const char *name = 0;
1437 int reg_number;
1438 tree id;
1439 rtx x;
1440
1441 /* Check that we are not being given an automatic variable. */
1442 gcc_assert (TREE_CODE (decl) != PARM_DECL
1443 && TREE_CODE (decl) != RESULT_DECL);
1444
1445 /* A weak alias has TREE_PUBLIC set but not the other bits. */
1446 gcc_assert (!VAR_P (decl)
1447 || TREE_STATIC (decl)
1448 || TREE_PUBLIC (decl)
1449 || DECL_EXTERNAL (decl)
1450 || DECL_REGISTER (decl));
1451
1452 /* And that we were not given a type or a label. */
1453 gcc_assert (TREE_CODE (decl) != TYPE_DECL
1454 && TREE_CODE (decl) != LABEL_DECL);
1455
1456 /* For a duplicate declaration, we can be called twice on the
1457 same DECL node. Don't discard the RTL already made. */
1458 if (DECL_RTL_SET_P (decl))
1459 {
1460 /* If the old RTL had the wrong mode, fix the mode. */
1461 x = DECL_RTL (decl);
1462 if (GET_MODE (x) != DECL_MODE (decl))
1463 SET_DECL_RTL (decl, adjust_address_nv (x, DECL_MODE (decl), 0));
1464
1465 if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
1466 return;
1467
1468 /* ??? Another way to do this would be to maintain a hashed
1469 table of such critters. Instead of adding stuff to a DECL
1470 to give certain attributes to it, we could use an external
1471 hash map from DECL to set of attributes. */
1472
1473 /* Let the target reassign the RTL if it wants.
1474 This is necessary, for example, when one machine specific
1475 decl attribute overrides another. */
1476 targetm.encode_section_info (decl, DECL_RTL (decl), false);
1477
1478 /* If the symbol has a SYMBOL_REF_BLOCK field, update it based
1479 on the new decl information. */
1480 if (MEM_P (x)
1481 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
1482 && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (x, 0)))
1483 change_symbol_block (XEXP (x, 0), get_block_for_decl (decl));
1484
1485 return;
1486 }
1487
1488 /* If this variable belongs to the global constant pool, retrieve the
1489 pre-computed RTL or recompute it in LTO mode. */
1490 if (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
1491 {
1492 SET_DECL_RTL (decl, output_constant_def (DECL_INITIAL (decl), 1));
1493 return;
1494 }
1495
1496 id = DECL_ASSEMBLER_NAME (decl);
1497 name = IDENTIFIER_POINTER (id);
1498
1499 if (name[0] != '*' && TREE_CODE (decl) != FUNCTION_DECL
1500 && DECL_REGISTER (decl))
1501 {
1502 error ("register name not specified for %q+D", decl);
1503 }
1504 else if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
1505 {
1506 const char *asmspec = name+1;
1507 machine_mode mode = DECL_MODE (decl);
1508 reg_number = decode_reg_name (asmspec);
1509 /* First detect errors in declaring global registers. */
1510 if (reg_number == -1)
1511 error ("register name not specified for %q+D", decl);
1512 else if (reg_number < 0)
1513 error ("invalid register name for %q+D", decl);
1514 else if (mode == BLKmode)
1515 error ("data type of %q+D isn%'t suitable for a register",
1516 decl);
1517 else if (!in_hard_reg_set_p (accessible_reg_set, mode, reg_number))
1518 error ("the register specified for %q+D cannot be accessed"
1519 " by the current target", decl);
1520 else if (!in_hard_reg_set_p (operand_reg_set, mode, reg_number))
1521 error ("the register specified for %q+D is not general enough"
1522 " to be used as a register variable", decl);
1523 else if (!targetm.hard_regno_mode_ok (reg_number, mode))
1524 error ("register specified for %q+D isn%'t suitable for data type",
1525 decl);
1526 else if (reg_number != HARD_FRAME_POINTER_REGNUM
1527 && (reg_number == FRAME_POINTER_REGNUM
1528 #ifdef RETURN_ADDRESS_POINTER_REGNUM
1529 || reg_number == RETURN_ADDRESS_POINTER_REGNUM
1530 #endif
1531 || reg_number == ARG_POINTER_REGNUM)
1532 && eliminable_regno_p (reg_number))
1533 error ("register specified for %q+D is an internal GCC "
1534 "implementation detail", decl);
1535 /* Now handle properly declared static register variables. */
1536 else
1537 {
1538 int nregs;
1539
1540 if (DECL_INITIAL (decl) != 0 && TREE_STATIC (decl))
1541 {
1542 DECL_INITIAL (decl) = 0;
1543 error ("global register variable has initial value");
1544 }
1545 if (TREE_THIS_VOLATILE (decl))
1546 warning (OPT_Wvolatile_register_var,
1547 "optimization may eliminate reads and/or "
1548 "writes to register variables");
1549
1550 /* If the user specified one of the eliminables registers here,
1551 e.g., FRAME_POINTER_REGNUM, we don't want to get this variable
1552 confused with that register and be eliminated. This usage is
1553 somewhat suspect... */
1554
1555 SET_DECL_RTL (decl, gen_raw_REG (mode, reg_number));
1556 ORIGINAL_REGNO (DECL_RTL (decl)) = reg_number;
1557 REG_USERVAR_P (DECL_RTL (decl)) = 1;
1558
1559 if (TREE_STATIC (decl))
1560 {
1561 /* Make this register global, so not usable for anything
1562 else. */
1563 #ifdef ASM_DECLARE_REGISTER_GLOBAL
1564 name = IDENTIFIER_POINTER (DECL_NAME (decl));
1565 ASM_DECLARE_REGISTER_GLOBAL (asm_out_file, decl, reg_number, name);
1566 #endif
1567 nregs = hard_regno_nregs (reg_number, mode);
1568 while (nregs > 0)
1569 globalize_reg (decl, reg_number + --nregs);
1570 }
1571
1572 /* As a register variable, it has no section. */
1573 return;
1574 }
1575 /* Avoid internal errors from invalid register
1576 specifications. */
1577 SET_DECL_ASSEMBLER_NAME (decl, NULL_TREE);
1578 DECL_HARD_REGISTER (decl) = 0;
1579 /* Also avoid SSA inconsistencies by pretending this is an external
1580 decl now. */
1581 DECL_EXTERNAL (decl) = 1;
1582 return;
1583 }
1584 /* Now handle ordinary static variables and functions (in memory).
1585 Also handle variables that were invalidly declared `register'. */
1586 else if (name[0] == '*')
1587 {
1588 #ifdef REGISTER_PREFIX
1589 if (strlen (REGISTER_PREFIX) != 0)
1590 {
1591 reg_number = decode_reg_name (name);
1592 if (reg_number >= 0 || reg_number == -3)
1593 error ("register name given for non-register variable %q+D", decl);
1594 }
1595 #endif
1596 }
1597
1598 /* Specifying a section attribute on a variable forces it into a
1599 non-.bss section, and thus it cannot be common. */
1600 /* FIXME: In general this code should not be necessary because
1601 the visibility pass is doing the same work. But notice_global_symbol
1602 is called early and it needs to make DECL_RTL to get the name.
1603 We take care of recomputing the DECL_RTL after visibility is changed. */
1604 if (VAR_P (decl)
1605 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1606 && DECL_SECTION_NAME (decl) != NULL
1607 && DECL_INITIAL (decl) == NULL_TREE
1608 && DECL_COMMON (decl))
1609 DECL_COMMON (decl) = 0;
1610
1611 /* Variables can't be both common and weak. */
1612 if (VAR_P (decl) && DECL_WEAK (decl))
1613 DECL_COMMON (decl) = 0;
1614
1615 if (use_object_blocks_p () && use_blocks_for_decl_p (decl))
1616 x = create_block_symbol (name, get_block_for_decl (decl), -1);
1617 else
1618 {
1619 machine_mode address_mode = Pmode;
1620 if (TREE_TYPE (decl) != error_mark_node)
1621 {
1622 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
1623 address_mode = targetm.addr_space.address_mode (as);
1624 }
1625 x = gen_rtx_SYMBOL_REF (address_mode, name);
1626 }
1627 SYMBOL_REF_WEAK (x) = DECL_WEAK (decl);
1628 SET_SYMBOL_REF_DECL (x, decl);
1629
1630 x = gen_rtx_MEM (DECL_MODE (decl), x);
1631 if (TREE_CODE (decl) != FUNCTION_DECL)
1632 set_mem_attributes (x, decl, 1);
1633 SET_DECL_RTL (decl, x);
1634
1635 /* Optionally set flags or add text to the name to record information
1636 such as that it is a function name.
1637 If the name is changed, the macro ASM_OUTPUT_LABELREF
1638 will have to know how to strip this information. */
1639 targetm.encode_section_info (decl, DECL_RTL (decl), true);
1640 }
1641
1642 /* Like make_decl_rtl, but inhibit creation of new alias sets when
1643 calling make_decl_rtl. Also, reset DECL_RTL before returning the
1644 rtl. */
1645
1646 rtx
1647 make_decl_rtl_for_debug (tree decl)
1648 {
1649 unsigned int save_aliasing_flag;
1650 rtx rtl;
1651
1652 if (DECL_RTL_SET_P (decl))
1653 return DECL_RTL (decl);
1654
1655 /* Kludge alert! Somewhere down the call chain, make_decl_rtl will
1656 call new_alias_set. If running with -fcompare-debug, sometimes
1657 we do not want to create alias sets that will throw the alias
1658 numbers off in the comparison dumps. So... clearing
1659 flag_strict_aliasing will keep new_alias_set() from creating a
1660 new set. */
1661 save_aliasing_flag = flag_strict_aliasing;
1662 flag_strict_aliasing = 0;
1663
1664 rtl = DECL_RTL (decl);
1665 /* Reset DECL_RTL back, as various parts of the compiler expects
1666 DECL_RTL set meaning it is actually going to be output. */
1667 SET_DECL_RTL (decl, NULL);
1668
1669 flag_strict_aliasing = save_aliasing_flag;
1670 return rtl;
1671 }
1672
1673 /* Output a string of literal assembler code
1674 for an `asm' keyword used between functions. */
1675
1676 void
1677 assemble_asm (tree string)
1678 {
1679 const char *p;
1680 app_enable ();
1681
1682 if (TREE_CODE (string) == ADDR_EXPR)
1683 string = TREE_OPERAND (string, 0);
1684
1685 p = TREE_STRING_POINTER (string);
1686 fprintf (asm_out_file, "%s%s\n", p[0] == '\t' ? "" : "\t", p);
1687 }
1688
1689 /* Write the address of the entity given by SYMBOL to SEC. */
1690 void
1691 assemble_addr_to_section (rtx symbol, section *sec)
1692 {
1693 switch_to_section (sec);
1694 assemble_align (POINTER_SIZE);
1695 assemble_integer (symbol, POINTER_SIZE_UNITS, POINTER_SIZE, 1);
1696 }
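
/* As a sketch, on a typical 64-bit ELF target the three calls above come
   out as something like

	.section <sec>
	.align	8
	.quad	<symbol>

   since POINTER_SIZE_UNITS is 8 there and the target's aligned 8-byte
   directive is ".quad".  */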
1697
1698 /* Return the numbered .ctors.N (if CONSTRUCTOR_P) or .dtors.N (if
1699 not) section for PRIORITY. */
1700 section *
1701 get_cdtor_priority_section (int priority, bool constructor_p)
1702 {
1703 /* Buffer conservatively large enough for the full range of a 32-bit
1704 int plus the text below. */
1705 char buf[18];
1706
1707 /* ??? This only works reliably with the GNU linker. */
1708 sprintf (buf, "%s.%.5u",
1709 constructor_p ? ".ctors" : ".dtors",
1710 /* Invert the numbering so the linker puts us in the proper
1711 order; constructors are run from right to left, and the
1712 linker sorts in increasing order. */
1713 MAX_INIT_PRIORITY - priority);
1714 return get_section (buf, SECTION_WRITE, NULL);
1715 }
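
/* For illustration, assuming MAX_INIT_PRIORITY is 65535: a constructor
   with priority 65535 is placed in ".ctors.00000" and one with priority
   101 in ".ctors.65434", so the GNU linker's lexicographic sort of these
   numbered sections reproduces the intended execution order.  */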
1716
1717 void
1718 default_named_section_asm_out_destructor (rtx symbol, int priority)
1719 {
1720 section *sec;
1721
1722 if (priority != DEFAULT_INIT_PRIORITY)
1723 sec = get_cdtor_priority_section (priority,
1724 /*constructor_p=*/false);
1725 else
1726 sec = get_section (".dtors", SECTION_WRITE, NULL);
1727
1728 assemble_addr_to_section (symbol, sec);
1729 }
1730
1731 #ifdef DTORS_SECTION_ASM_OP
1732 void
1733 default_dtor_section_asm_out_destructor (rtx symbol,
1734 int priority ATTRIBUTE_UNUSED)
1735 {
1736 assemble_addr_to_section (symbol, dtors_section);
1737 }
1738 #endif
1739
1740 void
1741 default_named_section_asm_out_constructor (rtx symbol, int priority)
1742 {
1743 section *sec;
1744
1745 if (priority != DEFAULT_INIT_PRIORITY)
1746 sec = get_cdtor_priority_section (priority,
1747 /*constructor_p=*/true);
1748 else
1749 sec = get_section (".ctors", SECTION_WRITE, NULL);
1750
1751 assemble_addr_to_section (symbol, sec);
1752 }
1753
1754 #ifdef CTORS_SECTION_ASM_OP
1755 void
1756 default_ctor_section_asm_out_constructor (rtx symbol,
1757 int priority ATTRIBUTE_UNUSED)
1758 {
1759 assemble_addr_to_section (symbol, ctors_section);
1760 }
1761 #endif
1762
1763 /* CONSTANT_POOL_BEFORE_FUNCTION may be defined as an expression with
1764 a nonzero value if the constant pool should be output before the
1765 start of the function, or a zero value if the pool should be output
1766 after the end of the function. The default is to put it before the
1767 start. */
1768
1769 #ifndef CONSTANT_POOL_BEFORE_FUNCTION
1770 #define CONSTANT_POOL_BEFORE_FUNCTION 1
1771 #endif
1772
1773 /* DECL is an object (either VAR_DECL or FUNCTION_DECL) which is going
1774 to be output to assembler.
1775 Set first_global_object_name and weak_global_object_name as appropriate. */
1776
1777 void
1778 notice_global_symbol (tree decl)
1779 {
1780 const char **t = &first_global_object_name;
1781
1782 if (first_global_object_name
1783 || !TREE_PUBLIC (decl)
1784 || DECL_EXTERNAL (decl)
1785 || !DECL_NAME (decl)
1786 || (VAR_P (decl) && DECL_HARD_REGISTER (decl))
1787 || (TREE_CODE (decl) != FUNCTION_DECL
1788 && (!VAR_P (decl)
1789 || (DECL_COMMON (decl)
1790 && (DECL_INITIAL (decl) == 0
1791 || DECL_INITIAL (decl) == error_mark_node)))))
1792 return;
1793
1794 /* We win when a global object is found, but it is useful to know about a weak
1795 symbol as well so we can produce nicer unique names. */
1796 if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl) || flag_shlib)
1797 t = &weak_global_object_name;
1798
1799 if (!*t)
1800 {
1801 tree id = DECL_ASSEMBLER_NAME (decl);
1802 ultimate_transparent_alias_target (&id);
1803 *t = ggc_strdup (targetm.strip_name_encoding (IDENTIFIER_POINTER (id)));
1804 }
1805 }
1806
1807 /* If not using flag_reorder_blocks_and_partition, decide early whether the
1808 current function goes into the cold section, so that targets can use
1809 current_function_section during RTL expansion. DECL describes the
1810 function. */
1811
1812 void
1813 decide_function_section (tree decl)
1814 {
1815 first_function_block_is_cold = false;
1816
1817 if (DECL_SECTION_NAME (decl))
1818 {
1819 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1820 /* Calls to function_section rely on first_function_block_is_cold
1821 being accurate. */
1822 first_function_block_is_cold = (node
1823 && node->frequency
1824 == NODE_FREQUENCY_UNLIKELY_EXECUTED);
1825 }
1826
1827 in_cold_section_p = first_function_block_is_cold;
1828 }
1829
1830 /* Get the function's name, as described by its RTL. This may be
1831 different from the DECL_NAME name used in the source file. */
1832 const char *
1833 get_fnname_from_decl (tree decl)
1834 {
1835 rtx x = DECL_RTL (decl);
1836 gcc_assert (MEM_P (x));
1837 x = XEXP (x, 0);
1838 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1839 return XSTR (x, 0);
1840 }
1841
1842 /* Output assembler code for the constant pool of a function, and code
1843 associated with defining the name of the function. DECL describes the
1844 function. NAME is the function's name. For the constant pool, we use
1845 the current constant pool data. */
1846
1847 void
1848 assemble_start_function (tree decl, const char *fnname)
1849 {
1850 int align;
1851 char tmp_label[100];
1852 bool hot_label_written = false;
1853
1854 if (crtl->has_bb_partition)
1855 {
1856 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB", const_labelno);
1857 crtl->subsections.hot_section_label = ggc_strdup (tmp_label);
1858 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDB", const_labelno);
1859 crtl->subsections.cold_section_label = ggc_strdup (tmp_label);
1860 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTE", const_labelno);
1861 crtl->subsections.hot_section_end_label = ggc_strdup (tmp_label);
1862 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDE", const_labelno);
1863 crtl->subsections.cold_section_end_label = ggc_strdup (tmp_label);
1864 const_labelno++;
1865 cold_function_name = NULL_TREE;
1866 }
1867 else
1868 {
1869 crtl->subsections.hot_section_label = NULL;
1870 crtl->subsections.cold_section_label = NULL;
1871 crtl->subsections.hot_section_end_label = NULL;
1872 crtl->subsections.cold_section_end_label = NULL;
1873 }
1874
1875 /* The following code does not need preprocessing in the assembler. */
1876
1877 app_disable ();
1878
1879 if (CONSTANT_POOL_BEFORE_FUNCTION)
1880 output_constant_pool (fnname, decl);
1881
1882 align = symtab_node::get (decl)->definition_alignment ();
1883
1884 /* Make sure the hot and cold text (code) sections are properly
1885 aligned. This is necessary here in the case where the function
1886 has both hot and cold sections, because we don't want to re-set
1887 the alignment when the section switch happens mid-function. */
1888
1889 if (crtl->has_bb_partition)
1890 {
1891 first_function_block_is_cold = false;
1892
1893 switch_to_section (unlikely_text_section ());
1894 assemble_align (align);
1895 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label);
1896
1897 /* When the function starts with a cold section, we need to explicitly
1898 align the hot section and write out the hot section label.
1899 But if the current function is a thunk, we do not have a CFG. */
1900 if (!cfun->is_thunk
1901 && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION)
1902 {
1903 switch_to_section (text_section);
1904 assemble_align (align);
1905 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
1906 hot_label_written = true;
1907 first_function_block_is_cold = true;
1908 }
1909 in_cold_section_p = first_function_block_is_cold;
1910 }
1911
1912
1913 /* Switch to the correct text section for the start of the function. */
1914
1915 switch_to_section (function_section (decl), decl);
1916 if (crtl->has_bb_partition && !hot_label_written)
1917 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
1918
1919 /* Tell assembler to move to target machine's alignment for functions. */
1920 align = floor_log2 (align / BITS_PER_UNIT);
1921 if (align > 0)
1922 {
1923 ASM_OUTPUT_ALIGN (asm_out_file, align);
1924 }
1925
1926 /* Handle a user-specified function alignment.
1927 Note that we still need to align to DECL_ALIGN, as above,
1928 because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all. */
1929 if (! DECL_USER_ALIGN (decl)
1930 && align_functions.levels[0].log > align
1931 && optimize_function_for_speed_p (cfun))
1932 {
1933 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1934 int align_log = align_functions.levels[0].log;
1935 #endif
1936 int max_skip = align_functions.levels[0].maxskip;
1937 if (flag_limit_function_alignment && crtl->max_insn_address > 0
1938 && max_skip >= crtl->max_insn_address)
1939 max_skip = crtl->max_insn_address - 1;
1940
1941 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1942 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, align_log, max_skip);
1943 if (max_skip == align_functions.levels[0].maxskip)
1944 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file,
1945 align_functions.levels[1].log,
1946 align_functions.levels[1].maxskip);
1947 #else
1948 ASM_OUTPUT_ALIGN (asm_out_file, align_functions.levels[0].log);
1949 #endif
1950 }
1951
1952 #ifdef ASM_OUTPUT_FUNCTION_PREFIX
1953 ASM_OUTPUT_FUNCTION_PREFIX (asm_out_file, fnname);
1954 #endif
1955
1956 if (!DECL_IGNORED_P (decl))
1957 (*debug_hooks->begin_function) (decl);
1958
1959 /* Make function name accessible from other files, if appropriate. */
1960
1961 if (TREE_PUBLIC (decl))
1962 {
1963 notice_global_symbol (decl);
1964
1965 globalize_decl (decl);
1966
1967 maybe_assemble_visibility (decl);
1968 }
1969
1970 if (DECL_PRESERVE_P (decl))
1971 targetm.asm_out.mark_decl_preserved (fnname);
1972
1973 unsigned short patch_area_size = crtl->patch_area_size;
1974 unsigned short patch_area_entry = crtl->patch_area_entry;
1975
1976 /* Emit the patching area before the entry label, if any. */
1977 if (patch_area_entry > 0)
1978 targetm.asm_out.print_patchable_function_entry (asm_out_file,
1979 patch_area_entry, true);
1980
1981 /* Do any machine/system dependent processing of the function name. */
1982 #ifdef ASM_DECLARE_FUNCTION_NAME
1983 ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl);
1984 #else
1985 /* Standard thing is just output label for the function. */
1986 ASM_OUTPUT_FUNCTION_LABEL (asm_out_file, fnname, current_function_decl);
1987 #endif /* ASM_DECLARE_FUNCTION_NAME */
1988
1989 /* And the area after the label. Record it if we haven't done so yet. */
1990 if (patch_area_size > patch_area_entry)
1991 targetm.asm_out.print_patchable_function_entry (asm_out_file,
1992 patch_area_size
1993 - patch_area_entry,
1994 patch_area_entry == 0);
1995
1996 if (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (decl)))
1997 saw_no_split_stack = true;
1998 }
1999
2000 /* Output assembler code associated with defining the size of the
2001 function. DECL describes the function. NAME is the function's name. */
2002
2003 void
2004 assemble_end_function (tree decl, const char *fnname ATTRIBUTE_UNUSED)
2005 {
2006 #ifdef ASM_DECLARE_FUNCTION_SIZE
2007 /* We could have switched section in the middle of the function. */
2008 if (crtl->has_bb_partition)
2009 switch_to_section (function_section (decl));
2010 ASM_DECLARE_FUNCTION_SIZE (asm_out_file, fnname, decl);
2011 #endif
2012 if (! CONSTANT_POOL_BEFORE_FUNCTION)
2013 {
2014 output_constant_pool (fnname, decl);
2015 switch_to_section (function_section (decl)); /* need to switch back */
2016 }
2017 /* Output labels for end of hot/cold text sections (to be used by
2018 debug info.) */
2019 if (crtl->has_bb_partition)
2020 {
2021 section *save_text_section;
2022
2023 save_text_section = in_section;
2024 switch_to_section (unlikely_text_section ());
2025 #ifdef ASM_DECLARE_COLD_FUNCTION_SIZE
2026 if (cold_function_name != NULL_TREE)
2027 ASM_DECLARE_COLD_FUNCTION_SIZE (asm_out_file,
2028 IDENTIFIER_POINTER (cold_function_name),
2029 decl);
2030 #endif
2031 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_end_label);
2032 if (first_function_block_is_cold)
2033 switch_to_section (text_section);
2034 else
2035 switch_to_section (function_section (decl));
2036 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_end_label);
2037 switch_to_section (save_text_section);
2038 }
2039 }
2040
2041 /* Assemble code to leave SIZE bytes of zeros. */
2042
2043 void
2044 assemble_zeros (unsigned HOST_WIDE_INT size)
2045 {
2046 /* Do no output if -fsyntax-only. */
2047 if (flag_syntax_only)
2048 return;
2049
2050 #ifdef ASM_NO_SKIP_IN_TEXT
2051 /* The `space' pseudo in the text section outputs nop insns rather than 0s,
2052 so we must output 0s explicitly in the text section. */
2053 if (ASM_NO_SKIP_IN_TEXT && (in_section->common.flags & SECTION_CODE) != 0)
2054 {
2055 unsigned HOST_WIDE_INT i;
2056 for (i = 0; i < size; i++)
2057 assemble_integer (const0_rtx, 1, BITS_PER_UNIT, 1);
2058 }
2059 else
2060 #endif
2061 if (size > 0)
2062 ASM_OUTPUT_SKIP (asm_out_file, size);
2063 }
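
/* E.g. assemble_zeros (32) in a data section normally becomes a single
   ASM_OUTPUT_SKIP directive such as ".skip 32" (or ".zero 32"), while on
   targets defining ASM_NO_SKIP_IN_TEXT the same request inside a code
   section is spelled out as 32 individual zero bytes.  */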
2064
2065 /* Assemble an alignment pseudo op for an ALIGN-bit boundary. */
2066
2067 void
2068 assemble_align (unsigned int align)
2069 {
2070 if (align > BITS_PER_UNIT)
2071 {
2072 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
2073 }
2074 }
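
/* For example, assemble_align (64) on a target with BITS_PER_UNIT == 8
   requests an 8-byte boundary: ASM_OUTPUT_ALIGN is handed
   floor_log2 (64 / 8) == 3.  */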
2075
2076 /* Assemble a string constant with the specified C string as contents. */
2077
2078 void
2079 assemble_string (const char *p, int size)
2080 {
2081 int pos = 0;
2082 int maximum = 2000;
2083
2084 /* If the string is very long, split it up. */
2085
2086 while (pos < size)
2087 {
2088 int thissize = size - pos;
2089 if (thissize > maximum)
2090 thissize = maximum;
2091
2092 ASM_OUTPUT_ASCII (asm_out_file, p, thissize);
2093
2094 pos += thissize;
2095 p += thissize;
2096 }
2097 }
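
/* E.g. a 5000-byte string constant is emitted as three ASM_OUTPUT_ASCII
   chunks of 2000, 2000 and 1000 bytes, keeping each directive at a
   length most assemblers handle comfortably.  */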
2098
2099
2100 /* A noswitch_section_callback for lcomm_section. */
2101
2102 static bool
2103 emit_local (tree decl ATTRIBUTE_UNUSED,
2104 const char *name ATTRIBUTE_UNUSED,
2105 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2106 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2107 {
2108 #if defined ASM_OUTPUT_ALIGNED_DECL_LOCAL
2109 unsigned int align = symtab_node::get (decl)->definition_alignment ();
2110 ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, decl, name,
2111 size, align);
2112 return true;
2113 #elif defined ASM_OUTPUT_ALIGNED_LOCAL
2114 unsigned int align = symtab_node::get (decl)->definition_alignment ();
2115 ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, align);
2116 return true;
2117 #else
2118 ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
2119 return false;
2120 #endif
2121 }
2122
2123 /* A noswitch_section_callback for bss_noswitch_section. */
2124
2125 #if defined ASM_OUTPUT_ALIGNED_BSS
2126 static bool
2127 emit_bss (tree decl ATTRIBUTE_UNUSED,
2128 const char *name ATTRIBUTE_UNUSED,
2129 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2130 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2131 {
2132 ASM_OUTPUT_ALIGNED_BSS (asm_out_file, decl, name, size,
2133 get_variable_align (decl));
2134 return true;
2135 }
2136 #endif
2137
2138 /* A noswitch_section_callback for comm_section. */
2139
2140 static bool
2141 emit_common (tree decl ATTRIBUTE_UNUSED,
2142 const char *name ATTRIBUTE_UNUSED,
2143 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2144 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2145 {
2146 #if defined ASM_OUTPUT_ALIGNED_DECL_COMMON
2147 ASM_OUTPUT_ALIGNED_DECL_COMMON (asm_out_file, decl, name,
2148 size, get_variable_align (decl));
2149 return true;
2150 #elif defined ASM_OUTPUT_ALIGNED_COMMON
2151 ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size,
2152 get_variable_align (decl));
2153 return true;
2154 #else
2155 ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded);
2156 return false;
2157 #endif
2158 }
2159
2160 /* A noswitch_section_callback for tls_comm_section. */
2161
2162 static bool
2163 emit_tls_common (tree decl ATTRIBUTE_UNUSED,
2164 const char *name ATTRIBUTE_UNUSED,
2165 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2166 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2167 {
2168 #ifdef ASM_OUTPUT_TLS_COMMON
2169 ASM_OUTPUT_TLS_COMMON (asm_out_file, decl, name, size);
2170 return true;
2171 #else
2172 sorry ("thread-local COMMON data not implemented");
2173 return true;
2174 #endif
2175 }
2176
2177 /* Assemble DECL given that it belongs in SECTION_NOSWITCH section SECT.
2178 NAME is the name of DECL's SYMBOL_REF. */
2179
2180 static void
2181 assemble_noswitch_variable (tree decl, const char *name, section *sect,
2182 unsigned int align)
2183 {
2184 unsigned HOST_WIDE_INT size, rounded;
2185
2186 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2187 rounded = size;
2188
2189 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl))
2190 size += asan_red_zone_size (size);
2191
2192 /* Don't allocate zero bytes of common,
2193 since that means "undefined external" in the linker. */
2194 if (size == 0)
2195 rounded = 1;
2196
2197 /* Round size up to multiple of BIGGEST_ALIGNMENT bits
2198 so that each uninitialized object starts on such a boundary. */
2199 rounded += (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1;
2200 rounded = (rounded / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2201 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
2202
2203 if (!sect->noswitch.callback (decl, name, size, rounded)
2204 && (unsigned HOST_WIDE_INT) (align / BITS_PER_UNIT) > rounded)
2205 error ("requested alignment for %q+D is greater than "
2206 "implemented alignment of %wu", decl, rounded);
2207 }
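
/* As a worked example, assuming BIGGEST_ALIGNMENT is 128 bits (16 bytes):
   a 10-byte object yields rounded = (10 + 15) / 16 * 16 == 16, and a
   zero-sized object is first bumped to rounded = 1 and then to 16, so the
   section callback is never asked for a zero-sized common block.  */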
2208
2209 /* A subroutine of assemble_variable. Output the label and contents of
2210 DECL, whose address is a SYMBOL_REF with name NAME. DONT_OUTPUT_DATA
2211 is as for assemble_variable. */
2212
2213 static void
2214 assemble_variable_contents (tree decl, const char *name,
2215 bool dont_output_data, bool merge_strings)
2216 {
2217 /* Do any machine/system dependent processing of the object. */
2218 #ifdef ASM_DECLARE_OBJECT_NAME
2219 last_assemble_variable_decl = decl;
2220 ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl);
2221 #else
2222 /* Standard thing is just output label for the object. */
2223 ASM_OUTPUT_LABEL (asm_out_file, name);
2224 #endif /* ASM_DECLARE_OBJECT_NAME */
2225
2226 if (!dont_output_data)
2227 {
2228 /* Caller is supposed to use varpool_get_constructor when it wants
2229 to output the body. */
2230 gcc_assert (!in_lto_p || DECL_INITIAL (decl) != error_mark_node);
2231 if (DECL_INITIAL (decl)
2232 && DECL_INITIAL (decl) != error_mark_node
2233 && !initializer_zerop (DECL_INITIAL (decl)))
2234 /* Output the actual data. */
2235 output_constant (DECL_INITIAL (decl),
2236 tree_to_uhwi (DECL_SIZE_UNIT (decl)),
2237 get_variable_align (decl),
2238 false, merge_strings);
2239 else
2240 /* Leave space for it. */
2241 assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl)));
2242 targetm.asm_out.decl_end ();
2243 }
2244 }
2245
2246 /* Write out assembly for the variable DECL, which is not defined in
2247 the current translation unit. */
2248 void
2249 assemble_undefined_decl (tree decl)
2250 {
2251 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2252 targetm.asm_out.assemble_undefined_decl (asm_out_file, name, decl);
2253 }
2254
2255 /* Assemble everything that is needed for a variable or function declaration.
2256 Not used for automatic variables, and not used for function definitions.
2257 Should not be called for variables of incomplete structure type.
2258
2259 TOP_LEVEL is nonzero if this variable has file scope.
2260 AT_END is nonzero if this is the special handling, at end of compilation,
2261 to define things that have had only tentative definitions.
2262 DONT_OUTPUT_DATA if nonzero means don't actually output the
2263 initial value (that will be done by the caller). */
2264
2265 void
2266 assemble_variable (tree decl, int top_level ATTRIBUTE_UNUSED,
2267 int at_end ATTRIBUTE_UNUSED, int dont_output_data)
2268 {
2269 const char *name;
2270 rtx decl_rtl, symbol;
2271 section *sect;
2272 unsigned int align;
2273 bool asan_protected = false;
2274
2275 /* This function is supposed to handle VARIABLES. Ensure we have one. */
2276 gcc_assert (VAR_P (decl));
2277
2278 /* Emulated TLS had better not get this far. */
2279 gcc_checking_assert (targetm.have_tls || !DECL_THREAD_LOCAL_P (decl));
2280
2281 last_assemble_variable_decl = 0;
2282
2283 /* Normally no need to say anything here for external references,
2284 since assemble_external is called by the language-specific code
2285 when a declaration is first seen. */
2286
2287 if (DECL_EXTERNAL (decl))
2288 return;
2289
2290 /* Do nothing for global register variables. */
2291 if (DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)))
2292 {
2293 TREE_ASM_WRITTEN (decl) = 1;
2294 return;
2295 }
2296
2297 /* If type was incomplete when the variable was declared,
2298 see if it is complete now. */
2299
2300 if (DECL_SIZE (decl) == 0)
2301 layout_decl (decl, 0);
2302
2303 /* Still incomplete => don't allocate it; treat the tentative defn
2304 (which is what it must have been) as an `extern' reference. */
2305
2306 if (!dont_output_data && DECL_SIZE (decl) == 0)
2307 {
2308 error ("storage size of %q+D isn%'t known", decl);
2309 TREE_ASM_WRITTEN (decl) = 1;
2310 return;
2311 }
2312
2313 /* The first declaration of a variable that comes through this function
2314 decides whether it is global (in C, has external linkage)
2315 or local (in C, has internal linkage). So do nothing more
2316 if this function has already run. */
2317
2318 if (TREE_ASM_WRITTEN (decl))
2319 return;
2320
2321 /* Make sure targetm.encode_section_info is invoked before we set
2322 ASM_WRITTEN. */
2323 decl_rtl = DECL_RTL (decl);
2324
2325 TREE_ASM_WRITTEN (decl) = 1;
2326
2327 /* Do no output if -fsyntax-only. */
2328 if (flag_syntax_only)
2329 return;
2330
2331 if (! dont_output_data
2332 && ! valid_constant_size_p (DECL_SIZE_UNIT (decl)))
2333 {
2334 error ("size of variable %q+D is too large", decl);
2335 return;
2336 }
2337
2338 gcc_assert (MEM_P (decl_rtl));
2339 gcc_assert (GET_CODE (XEXP (decl_rtl, 0)) == SYMBOL_REF);
2340 symbol = XEXP (decl_rtl, 0);
2341
2342 /* If this symbol belongs to the tree constant pool, output the constant
2343 if it hasn't already been written. */
2344 if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
2345 {
2346 tree decl = SYMBOL_REF_DECL (symbol);
2347 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
2348 output_constant_def_contents (symbol);
2349 return;
2350 }
2351
2352 app_disable ();
2353
2354 name = XSTR (symbol, 0);
2355 if (TREE_PUBLIC (decl) && DECL_NAME (decl))
2356 notice_global_symbol (decl);
2357
2358 /* Compute the alignment of this data. */
2359
2360 align_variable (decl, dont_output_data);
2361
2362 if ((flag_sanitize & SANITIZE_ADDRESS)
2363 && asan_protect_global (decl))
2364 {
2365 asan_protected = true;
2366 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
2367 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
2368 }
2369
2370 set_mem_align (decl_rtl, DECL_ALIGN (decl));
2371
2372 align = get_variable_align (decl);
2373
2374 if (TREE_PUBLIC (decl))
2375 maybe_assemble_visibility (decl);
2376
2377 if (DECL_PRESERVE_P (decl))
2378 targetm.asm_out.mark_decl_preserved (name);
2379
2380 /* First make the assembler name(s) global if appropriate. */
2381 sect = get_variable_section (decl, false);
2382 if (TREE_PUBLIC (decl)
2383 && (sect->common.flags & SECTION_COMMON) == 0)
2384 globalize_decl (decl);
2385
2386 /* Output any data that we will need to use the address of. */
2387 if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node)
2388 output_addressed_constants (DECL_INITIAL (decl), 0);
2389
2390 /* dbxout.cc needs to know this. */
2391 if (sect && (sect->common.flags & SECTION_CODE) != 0)
2392 DECL_IN_TEXT_SECTION (decl) = 1;
2393
2394 /* If the decl is part of an object_block, make sure that the decl
2395 has been positioned within its block, but do not write out its
2396 definition yet. output_object_blocks will do that later. */
2397 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
2398 {
2399 gcc_assert (!dont_output_data);
2400 place_block_symbol (symbol);
2401 }
2402 else if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
2403 assemble_noswitch_variable (decl, name, sect, align);
2404 else
2405 {
2406 /* Special-case handling of vtv comdat sections. */
2407 if (SECTION_STYLE (sect) == SECTION_NAMED
2408 && (strcmp (sect->named.name, ".vtable_map_vars") == 0))
2409 handle_vtv_comdat_section (sect, decl);
2410 else
2411 switch_to_section (sect, decl);
2412 if (align > BITS_PER_UNIT)
2413 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
2414 assemble_variable_contents (decl, name, dont_output_data,
2415 (sect->common.flags & SECTION_MERGE)
2416 && (sect->common.flags & SECTION_STRINGS));
2417 if (asan_protected)
2418 {
2419 unsigned HOST_WIDE_INT size
2420 = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2421 assemble_zeros (asan_red_zone_size (size));
2422 }
2423 }
2424 }
2425
2426
2427 /* Given a function declaration (FN_DECL), this function assembles the
2428 function into the .preinit_array section. */
2429
2430 void
2431 assemble_vtv_preinit_initializer (tree fn_decl)
2432 {
2433 section *sect;
2434 unsigned flags = SECTION_WRITE;
2435 rtx symbol = XEXP (DECL_RTL (fn_decl), 0);
2436
2437 flags |= SECTION_NOTYPE;
2438 sect = get_section (".preinit_array", flags, fn_decl);
2439 switch_to_section (sect);
2440 assemble_addr_to_section (symbol, sect);
2441 }
2442
2443 /* Return 1 if type TYPE contains any pointers. */
2444
2445 static int
2446 contains_pointers_p (tree type)
2447 {
2448 switch (TREE_CODE (type))
2449 {
2450 case POINTER_TYPE:
2451 case REFERENCE_TYPE:
2452 /* I'm not sure whether OFFSET_TYPE needs this treatment,
2453 so I'll play safe and return 1. */
2454 case OFFSET_TYPE:
2455 return 1;
2456
2457 case RECORD_TYPE:
2458 case UNION_TYPE:
2459 case QUAL_UNION_TYPE:
2460 {
2461 tree fields;
2462 /* For a type that has fields, see if the fields have pointers. */
2463 for (fields = TYPE_FIELDS (type); fields; fields = DECL_CHAIN (fields))
2464 if (TREE_CODE (fields) == FIELD_DECL
2465 && contains_pointers_p (TREE_TYPE (fields)))
2466 return 1;
2467 return 0;
2468 }
2469
2470 case ARRAY_TYPE:
2471 /* An array type contains pointers if its element type does. */
2472 return contains_pointers_p (TREE_TYPE (type));
2473
2474 default:
2475 return 0;
2476 }
2477 }
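
/* For instance, given

     struct s { int len; char *buf; };
     struct t { int a[4]; double d; };

   the first type returns 1 through its POINTER_TYPE field and the second
   falls through every case and returns 0.  */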
2478
2479 /* We delay assemble_external processing until
2480 the compilation unit is finalized. This is the best we can do for
2481 right now (i.e. stage 3 of GCC 4.0) - the right thing is to delay
2482 it all the way to final. See PR 17982 for further discussion. */
2483 static GTY(()) tree pending_assemble_externals;
2484
2485 #ifdef ASM_OUTPUT_EXTERNAL
2486 /* Some targets delay some output to final using TARGET_ASM_FILE_END.
2487 As a result, assemble_external can be called after the list of externals
2488 is processed and the pointer set destroyed. */
2489 static bool pending_assemble_externals_processed;
2490
2491 /* Avoid O(external_decls**2) lookups in the pending_assemble_externals
2492 TREE_LIST in assemble_external. */
2493 static hash_set<tree> *pending_assemble_externals_set;
2494
2495 /* True if DECL is a function decl for which no out-of-line copy exists.
2496 It is assumed that DECL's assembler name has been set. */
2497
2498 static bool
2499 incorporeal_function_p (tree decl)
2500 {
2501 if (TREE_CODE (decl) == FUNCTION_DECL && fndecl_built_in_p (decl))
2502 {
2503 const char *name;
2504
2505 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2506 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2507 return true;
2508
2509 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2510 /* Atomic or sync builtins which have survived this far will be
2511 resolved externally and therefore are not incorporeal. */
2512 if (startswith (name, "__builtin_"))
2513 return true;
2514 }
2515 return false;
2516 }
2517
2518 /* Actually do the tests to determine if this is necessary, and invoke
2519 ASM_OUTPUT_EXTERNAL. */
2520 static void
2521 assemble_external_real (tree decl)
2522 {
2523 rtx rtl = DECL_RTL (decl);
2524
2525 if (MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
2526 && !SYMBOL_REF_USED (XEXP (rtl, 0))
2527 && !incorporeal_function_p (decl))
2528 {
2529 /* Some systems do require some output. */
2530 SYMBOL_REF_USED (XEXP (rtl, 0)) = 1;
2531 ASM_OUTPUT_EXTERNAL (asm_out_file, decl, XSTR (XEXP (rtl, 0), 0));
2532 }
2533 }
2534 #endif
2535
2536 void
2537 process_pending_assemble_externals (void)
2538 {
2539 #ifdef ASM_OUTPUT_EXTERNAL
2540 tree list;
2541 for (list = pending_assemble_externals; list; list = TREE_CHAIN (list))
2542 assemble_external_real (TREE_VALUE (list));
2543
2544 pending_assemble_externals = 0;
2545 pending_assemble_externals_processed = true;
2546 delete pending_assemble_externals_set;
2547 #endif
2548 }
2549
2550 /* This TREE_LIST contains any weak symbol declarations waiting
2551 to be emitted. */
2552 static GTY(()) tree weak_decls;
2553
2554 /* Output something to declare an external symbol to the assembler,
2555 and qualifiers such as weakness. (Most assemblers don't need
2556 an extern declaration, so we normally output nothing.) Do nothing if
2557 DECL is not external. */
2558
2559 void
2560 assemble_external (tree decl ATTRIBUTE_UNUSED)
2561 {
2562 /* Make sure that the ASM_OUT_FILE is open.
2563 If it's not, we should not be calling this function. */
2564 gcc_assert (asm_out_file);
2565
2566 /* In a perfect world, the following condition would be true.
2567 Sadly, the Go front end emits assembly *from the front end*,
2568 bypassing the call graph. See PR52739. Fix before GCC 4.8. */
2569 #if 0
2570 /* This function should only be called if we are expanding, or have
2571 expanded, to RTL.
2572 Ideally, only final.cc would be calling this function, but it is
2573 not clear whether that would break things somehow. See PR 17982
2574 for further discussion. */
2575 gcc_assert (state == EXPANSION
2576 || state == FINISHED);
2577 #endif
2578
2579 if (!DECL_P (decl) || !DECL_EXTERNAL (decl) || !TREE_PUBLIC (decl))
2580 return;
2581
2582 /* We want to output annotations for weak and external symbols at the
2583 very end, to check whether they are referenced or not. */
2584
2585 if (TARGET_SUPPORTS_WEAK
2586 && DECL_WEAK (decl)
2587 /* TREE_STATIC is a weird and abused creature which is not
2588 generally the right test for whether an entity has been
2589 locally emitted, inlined or otherwise not-really-extern, but
2590 for declarations that can be weak, it happens to
2591 match. */
2592 && !TREE_STATIC (decl)
2593 && lookup_attribute ("weak", DECL_ATTRIBUTES (decl))
2594 && value_member (decl, weak_decls) == NULL_TREE)
2595 weak_decls = tree_cons (NULL, decl, weak_decls);
2596
2597 #ifdef ASM_OUTPUT_EXTERNAL
2598 if (pending_assemble_externals_processed)
2599 {
2600 assemble_external_real (decl);
2601 return;
2602 }
2603
2604 if (! pending_assemble_externals_set->add (decl))
2605 pending_assemble_externals = tree_cons (NULL, decl,
2606 pending_assemble_externals);
2607 #endif
2608 }
2609
2610 /* Similar, for calling a library function FUN. */
2611
2612 void
2613 assemble_external_libcall (rtx fun)
2614 {
2615 /* Declare library function name external when first used, if necessary. */
2616 if (! SYMBOL_REF_USED (fun))
2617 {
2618 SYMBOL_REF_USED (fun) = 1;
2619 targetm.asm_out.external_libcall (fun);
2620 }
2621 }
2622
2623 /* Assemble a label named NAME. */
2624
2625 void
2626 assemble_label (FILE *file, const char *name)
2627 {
2628 ASM_OUTPUT_LABEL (file, name);
2629 }
2630
2631 /* Set the symbol_referenced flag for ID. */
2632 void
2633 mark_referenced (tree id)
2634 {
2635 TREE_SYMBOL_REFERENCED (id) = 1;
2636 }
2637
2638 /* Set the symbol_referenced flag for DECL and notify callgraph. */
2639 void
2640 mark_decl_referenced (tree decl)
2641 {
2642 if (TREE_CODE (decl) == FUNCTION_DECL)
2643 {
2644 /* Extern inline functions don't become needed when referenced.
2645 If we know a method will be emitted in another TU and no new
2646 functions can be marked reachable, just use the external
2647 definition. */
2648 struct cgraph_node *node = cgraph_node::get_create (decl);
2649 if (!DECL_EXTERNAL (decl)
2650 && !node->definition)
2651 node->mark_force_output ();
2652 }
2653 else if (VAR_P (decl))
2654 {
2655 varpool_node *node = varpool_node::get_create (decl);
2656 /* The C++ front end uses mark_decl_referenced to force COMDAT variables
2657 that might otherwise appear dead to be output. */
2658 node->force_output = true;
2659 }
2660 /* else do nothing - we can get various sorts of CST nodes here,
2661 which do not need to be marked. */
2662 }
2663
2664
2665 /* Output to FILE (an assembly file) a reference to NAME. If NAME
2666 starts with a *, the rest of NAME is output verbatim. Otherwise
2667 NAME is transformed in a target-specific way (usually by the
2668 addition of an underscore). */
2669
2670 void
2671 assemble_name_raw (FILE *file, const char *name)
2672 {
2673 if (name[0] == '*')
2674 fputs (&name[1], file);
2675 else
2676 ASM_OUTPUT_LABELREF (file, name);
2677 }
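
/* E.g. assemble_name_raw (file, "*.LC0") prints ".LC0" verbatim, while
   assemble_name_raw (file, "foo") goes through ASM_OUTPUT_LABELREF and
   may pick up a target-specific user-label prefix such as "_foo".  */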
2678
2679 /* Return NAME that should actually be emitted, looking through
2680 transparent aliases. If NAME refers to an entity that is also
2681 represented as a tree (like a function or variable), mark the entity
2682 as referenced. */
2683 const char *
2684 assemble_name_resolve (const char *name)
2685 {
2686 const char *real_name = targetm.strip_name_encoding (name);
2687 tree id = maybe_get_identifier (real_name);
2688
2689 if (id)
2690 {
2691 tree id_orig = id;
2692
2693 mark_referenced (id);
2694 ultimate_transparent_alias_target (&id);
2695 if (id != id_orig)
2696 name = IDENTIFIER_POINTER (id);
2697 gcc_assert (! TREE_CHAIN (id));
2698 }
2699
2700 return name;
2701 }
2702
2703 /* Like assemble_name_raw, but should be used when NAME might refer to
2704 an entity that is also represented as a tree (like a function or
2705 variable). If NAME does refer to such an entity, that entity will
2706 be marked as referenced. */
2707
2708 void
2709 assemble_name (FILE *file, const char *name)
2710 {
2711 assemble_name_raw (file, assemble_name_resolve (name));
2712 }
2713
2714 /* Allocate SIZE bytes writable static space with a gensym name
2715 and return an RTX to refer to its address. */
2716
2717 rtx
2718 assemble_static_space (unsigned HOST_WIDE_INT size)
2719 {
2720 char name[17];
2721 const char *namestring;
2722 rtx x;
2723
2724 ASM_GENERATE_INTERNAL_LABEL (name, "LF", const_labelno);
2725 ++const_labelno;
2726 namestring = ggc_strdup (name);
2727
2728 x = gen_rtx_SYMBOL_REF (Pmode, namestring);
2729 SYMBOL_REF_FLAGS (x) = SYMBOL_FLAG_LOCAL;
2730
2731 #ifdef ASM_OUTPUT_ALIGNED_DECL_LOCAL
2732 ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, NULL_TREE, name, size,
2733 BIGGEST_ALIGNMENT);
2734 #else
2735 #ifdef ASM_OUTPUT_ALIGNED_LOCAL
2736 ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT);
2737 #else
2738 {
2739 /* Round size up to multiple of BIGGEST_ALIGNMENT bits
2740 so that each uninitialized object starts on such a boundary. */
2741 /* Variable `rounded' might or might not be used in ASM_OUTPUT_LOCAL. */
2742 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED
2743 = ((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1)
2744 / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2745 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
2746 ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
2747 }
2748 #endif
2749 #endif
2750 return x;
2751 }
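
/* Each call hands out a fresh internal label ("LF0", "LF1", ... before
   target decoration) backed by SIZE bytes of zero-initialized local
   storage, so the returned SYMBOL_REF can be used like the address of any
   other static object.  */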
2752
2753 /* Assemble the static constant template for function entry trampolines.
2754 This is done at most once per compilation.
2755 Returns an RTX for the address of the template. */
2756
2757 static GTY(()) rtx initial_trampoline;
2758
2759 rtx
2760 assemble_trampoline_template (void)
2761 {
2762 char label[256];
2763 const char *name;
2764 int align;
2765 rtx symbol;
2766
2767 gcc_assert (targetm.asm_out.trampoline_template != NULL);
2768
2769 if (initial_trampoline)
2770 return initial_trampoline;
2771
2772 /* By default, put trampoline templates in read-only data section. */
2773
2774 #ifdef TRAMPOLINE_SECTION
2775 switch_to_section (TRAMPOLINE_SECTION);
2776 #else
2777 switch_to_section (readonly_data_section);
2778 #endif
2779
2780 /* Write the assembler code to define one. */
2781 align = floor_log2 (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
2782 if (align > 0)
2783 ASM_OUTPUT_ALIGN (asm_out_file, align);
2784
2785 targetm.asm_out.internal_label (asm_out_file, "LTRAMP", 0);
2786 targetm.asm_out.trampoline_template (asm_out_file);
2787
2788 /* Record the rtl to refer to it. */
2789 ASM_GENERATE_INTERNAL_LABEL (label, "LTRAMP", 0);
2790 name = ggc_strdup (label);
2791 symbol = gen_rtx_SYMBOL_REF (Pmode, name);
2792 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL;
2793
2794 initial_trampoline = gen_const_mem (BLKmode, symbol);
2795 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
2796 set_mem_size (initial_trampoline, TRAMPOLINE_SIZE);
2797
2798 return initial_trampoline;
2799 }
2800
2801 /* A and B are either alignments or offsets. Return the minimum alignment
2802 that may be assumed after adding the two together. */
2803
2804 static inline unsigned
2805 min_align (unsigned int a, unsigned int b)
2806 {
2807 return least_bit_hwi (a | b);
2808 }
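
/* E.g. min_align (16, 24) == least_bit_hwi (16 | 24) == 8: adding an
   offset that is a multiple of 8 to a 16-byte-aligned address can only be
   assumed to keep 8-byte alignment.  */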
2809
2810 /* Return the assembler directive for creating a given kind of integer
2811 object. SIZE is the number of bytes in the object and ALIGNED_P
2812 indicates whether it is known to be aligned. Return NULL if the
2813 assembly dialect has no such directive.
2814
2815 The returned string should be printed at the start of a new line and
2816 be followed immediately by the object's initial value. */
2817
2818 const char *
2819 integer_asm_op (int size, int aligned_p)
2820 {
2821 struct asm_int_op *ops;
2822
2823 if (aligned_p)
2824 ops = &targetm.asm_out.aligned_op;
2825 else
2826 ops = &targetm.asm_out.unaligned_op;
2827
2828 switch (size)
2829 {
2830 case 1:
2831 return targetm.asm_out.byte_op;
2832 case 2:
2833 return ops->hi;
2834 case 3:
2835 return ops->psi;
2836 case 4:
2837 return ops->si;
2838 case 5:
2839 case 6:
2840 case 7:
2841 return ops->pdi;
2842 case 8:
2843 return ops->di;
2844 case 9:
2845 case 10:
2846 case 11:
2847 case 12:
2848 case 13:
2849 case 14:
2850 case 15:
2851 return ops->pti;
2852 case 16:
2853 return ops->ti;
2854 default:
2855 return NULL;
2856 }
2857 }
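
/* On a typical ELF target the aligned directives for sizes 1, 2, 4 and 8
   are something like ".byte", ".short", ".long" and ".quad"; a size with
   no directive (e.g. 16 on a 32-bit target) returns NULL, forcing
   assemble_integer to split the value.  */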
2858
2859 /* Use directive OP to assemble an integer object X. Print OP at the
2860 start of the line, followed immediately by the value of X. */
2861
2862 void
2863 assemble_integer_with_op (const char *op, rtx x)
2864 {
2865 fputs (op, asm_out_file);
2866 output_addr_const (asm_out_file, x);
2867 fputc ('\n', asm_out_file);
2868 }
2869
2870 /* The default implementation of the asm_out.integer target hook. */
2871
2872 bool
2873 default_assemble_integer (rtx x ATTRIBUTE_UNUSED,
2874 unsigned int size ATTRIBUTE_UNUSED,
2875 int aligned_p ATTRIBUTE_UNUSED)
2876 {
2877 const char *op = integer_asm_op (size, aligned_p);
2878 /* Avoid GAS bugs for large values. Specifically negative values whose
2879 absolute value fits in a bfd_vma, but not in a bfd_signed_vma. */
2880 if (size > UNITS_PER_WORD && size > POINTER_SIZE_UNITS)
2881 return false;
2882 return op && (assemble_integer_with_op (op, x), true);
2883 }
2884
2885 /* Assemble the integer constant X into an object of SIZE bytes. ALIGN is
2886 the alignment of the integer in bits. Return 1 if we were able to output
2887 the constant, otherwise 0. We must be able to output the constant,
2888 if FORCE is nonzero. */
2889
2890 bool
2891 assemble_integer (rtx x, unsigned int size, unsigned int align, int force)
2892 {
2893 int aligned_p;
2894
2895 aligned_p = (align >= MIN (size * BITS_PER_UNIT, BIGGEST_ALIGNMENT));
2896
2897 /* See if the target hook can handle this kind of object. */
2898 if (targetm.asm_out.integer (x, size, aligned_p))
2899 return true;
2900
2901 /* If the object is a multi-byte one, try splitting it up. Split
2902 it into words if it is multi-word, otherwise split it into bytes. */
2903 if (size > 1)
2904 {
2905 machine_mode omode, imode;
2906 unsigned int subalign;
2907 unsigned int subsize, i;
2908 enum mode_class mclass;
2909
2910 subsize = size > UNITS_PER_WORD? UNITS_PER_WORD : 1;
2911 subalign = MIN (align, subsize * BITS_PER_UNIT);
2912 if (GET_CODE (x) == CONST_FIXED)
2913 mclass = GET_MODE_CLASS (GET_MODE (x));
2914 else
2915 mclass = MODE_INT;
2916
2917 omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0).require ();
2918 imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0).require ();
2919
2920 for (i = 0; i < size; i += subsize)
2921 {
2922 rtx partial = simplify_subreg (omode, x, imode, i);
2923 if (!partial || !assemble_integer (partial, subsize, subalign, 0))
2924 break;
2925 }
2926 if (i == size)
2927 return true;
2928
2929 /* If we've printed some of it, but not all of it, there's no going
2930 back now. */
2931 gcc_assert (!i);
2932 }
2933
2934 gcc_assert (!force);
2935
2936 return false;
2937 }
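
/* For example, a 16-byte integer on a 64-bit target whose assembler lacks
   a TImode directive is split by the loop above into two 8-byte
   assemble_integer calls on the subwords produced by simplify_subreg.  */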
2938
2939 /* Assemble the floating-point constant D into an object of size MODE. ALIGN
2940 is the alignment of the constant in bits. If REVERSE is true, D is output
2941 in reverse storage order. */
2942
2943 void
2944 assemble_real (REAL_VALUE_TYPE d, scalar_float_mode mode, unsigned int align,
2945 bool reverse)
2946 {
2947 long data[4] = {0, 0, 0, 0};
2948 int bitsize, nelts, nunits, units_per;
2949 rtx elt;
2950
2951 /* This is hairy. We have a quantity of known size. real_to_target
2952 will put it into an array of *host* longs, 32 bits per element
2953 (even if long is more than 32 bits). We need to determine the
2954 number of array elements that are occupied (nelts) and the number
2955 of *target* min-addressable units that will be occupied in the
2956 object file (nunits). We cannot assume that 32 divides the
2957 mode's bitsize (size * BITS_PER_UNIT) evenly.
2958
2959 size * BITS_PER_UNIT is used here to make sure that padding bits
2960 (which might appear at either end of the value; real_to_target
2961 will include the padding bits in its output array) are included. */
2962
2963 nunits = GET_MODE_SIZE (mode);
2964 bitsize = nunits * BITS_PER_UNIT;
2965 nelts = CEIL (bitsize, 32);
2966 units_per = 32 / BITS_PER_UNIT;
2967
2968 real_to_target (data, &d, mode);
2969
2970 /* Put out the first word with the specified alignment. */
2971 unsigned int chunk_nunits = MIN (nunits, units_per);
2972 if (reverse)
2973 elt = flip_storage_order (SImode, gen_int_mode (data[nelts - 1], SImode));
2974 else
2975 elt = GEN_INT (sext_hwi (data[0], chunk_nunits * BITS_PER_UNIT));
2976 assemble_integer (elt, chunk_nunits, align, 1);
2977 nunits -= chunk_nunits;
2978
2979 /* Subsequent words need only 32-bit alignment. */
2980 align = min_align (align, 32);
2981
2982 for (int i = 1; i < nelts; i++)
2983 {
2984 chunk_nunits = MIN (nunits, units_per);
2985 if (reverse)
2986 elt = flip_storage_order (SImode,
2987 gen_int_mode (data[nelts - 1 - i], SImode));
2988 else
2989 elt = GEN_INT (sext_hwi (data[i], chunk_nunits * BITS_PER_UNIT));
2990 assemble_integer (elt, chunk_nunits, align, 1);
2991 nunits -= chunk_nunits;
2992 }
2993 }
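
/* As a concrete case, assuming BITS_PER_UNIT == 8: for DFmode (8 bytes)
   nunits is 8, bitsize 64, nelts 2 and units_per 4, so the value is
   emitted as two 32-bit chunks, the first with the requested alignment
   and the second with at most 32-bit alignment.  */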
2994
2995 /* Given an expression EXP with a constant value,
2996 reduce it to the sum of an assembler symbol and an integer.
2997 Store them both in the structure *VALUE.
2998 EXP must be reducible. */
2999
3000 class addr_const {
3001 public:
3002 rtx base;
3003 poly_int64 offset;
3004 };
3005
3006 static void
3007 decode_addr_const (tree exp, class addr_const *value)
3008 {
3009 tree target = TREE_OPERAND (exp, 0);
3010 poly_int64 offset = 0;
3011 rtx x;
3012
3013 while (1)
3014 {
3015 poly_int64 bytepos;
3016 if (TREE_CODE (target) == COMPONENT_REF
3017 && poly_int_tree_p (byte_position (TREE_OPERAND (target, 1)),
3018 &bytepos))
3019 {
3020 offset += bytepos;
3021 target = TREE_OPERAND (target, 0);
3022 }
3023 else if (TREE_CODE (target) == ARRAY_REF
3024 || TREE_CODE (target) == ARRAY_RANGE_REF)
3025 {
3026 /* Truncate big offset. */
3027 offset
3028 += (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (target)))
3029 * wi::to_poly_widest (TREE_OPERAND (target, 1)).force_shwi ());
3030 target = TREE_OPERAND (target, 0);
3031 }
3032 else if (TREE_CODE (target) == MEM_REF
3033 && TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
3034 {
3035 offset += mem_ref_offset (target).force_shwi ();
3036 target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
3037 }
3038 else if (TREE_CODE (target) == INDIRECT_REF
3039 && TREE_CODE (TREE_OPERAND (target, 0)) == NOP_EXPR
3040 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (target, 0), 0))
3041 == ADDR_EXPR)
3042 target = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (target, 0), 0), 0);
3043 else
3044 break;
3045 }
3046
3047 switch (TREE_CODE (target))
3048 {
3049 case VAR_DECL:
3050 case FUNCTION_DECL:
3051 x = DECL_RTL (target);
3052 break;
3053
3054 case LABEL_DECL:
3055 x = gen_rtx_MEM (FUNCTION_MODE,
3056 gen_rtx_LABEL_REF (Pmode, force_label_rtx (target)));
3057 break;
3058
3059 case REAL_CST:
3060 case FIXED_CST:
3061 case STRING_CST:
3062 case COMPLEX_CST:
3063 case CONSTRUCTOR:
3064 case INTEGER_CST:
3065 x = lookup_constant_def (target);
3066 /* Should have been added by output_addressed_constants. */
3067 gcc_assert (x);
3068 break;
3069
3070 case INDIRECT_REF:
3071 /* This deals with absolute addresses. */
3072 offset += tree_to_shwi (TREE_OPERAND (target, 0));
3073 x = gen_rtx_MEM (QImode,
3074 gen_rtx_SYMBOL_REF (Pmode, "origin of addresses"));
3075 break;
3076
3077 case COMPOUND_LITERAL_EXPR:
3078 gcc_assert (COMPOUND_LITERAL_EXPR_DECL (target));
3079 x = DECL_RTL (COMPOUND_LITERAL_EXPR_DECL (target));
3080 break;
3081
3082 default:
3083 gcc_unreachable ();
3084 }
3085
3086 gcc_assert (MEM_P (x));
3087 x = XEXP (x, 0);
3088
3089 value->base = x;
3090 value->offset = offset;
3091 }
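
/* For example, for the constant &arr[3], where arr is a file-scope array
   of 4-byte ints, the loop peels the ARRAY_REF leaving target == arr and
   offset == 12, and value->base becomes arr's SYMBOL_REF.  */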
3092
3093 static GTY(()) hash_table<tree_descriptor_hasher> *const_desc_htab;
3094
3095 static void maybe_output_constant_def_contents (struct constant_descriptor_tree *, int);
3096
3097 /* Constant pool accessor function. */
3098
3099 hash_table<tree_descriptor_hasher> *
3100 constant_pool_htab (void)
3101 {
3102 return const_desc_htab;
3103 }
3104
3105 /* Compute a hash code for a constant expression. */
3106
3107 hashval_t
3108 tree_descriptor_hasher::hash (constant_descriptor_tree *ptr)
3109 {
3110 return ptr->hash;
3111 }
3112
3113 static hashval_t
3114 const_hash_1 (const tree exp)
3115 {
3116 const char *p;
3117 hashval_t hi;
3118 int len, i;
3119 enum tree_code code = TREE_CODE (exp);
3120
3121 /* Either set P and LEN to the address and len of something to hash and
3122 exit the switch or return a value. */
3123
3124 switch (code)
3125 {
3126 case INTEGER_CST:
3127 p = (char *) &TREE_INT_CST_ELT (exp, 0);
3128 len = TREE_INT_CST_NUNITS (exp) * sizeof (HOST_WIDE_INT);
3129 break;
3130
3131 case REAL_CST:
3132 return real_hash (TREE_REAL_CST_PTR (exp));
3133
3134 case FIXED_CST:
3135 return fixed_hash (TREE_FIXED_CST_PTR (exp));
3136
3137 case STRING_CST:
3138 p = TREE_STRING_POINTER (exp);
3139 len = TREE_STRING_LENGTH (exp);
3140 break;
3141
3142 case COMPLEX_CST:
3143 return (const_hash_1 (TREE_REALPART (exp)) * 5
3144 + const_hash_1 (TREE_IMAGPART (exp)));
3145
3146 case VECTOR_CST:
3147 {
3148 hi = 7 + VECTOR_CST_NPATTERNS (exp);
3149 hi = hi * 563 + VECTOR_CST_NELTS_PER_PATTERN (exp);
3150 unsigned int count = vector_cst_encoded_nelts (exp);
3151 for (unsigned int i = 0; i < count; ++i)
3152 hi = hi * 563 + const_hash_1 (VECTOR_CST_ENCODED_ELT (exp, i));
3153 return hi;
3154 }
3155
3156 case CONSTRUCTOR:
3157 {
3158 unsigned HOST_WIDE_INT idx;
3159 tree value;
3160
3161 hi = 5 + int_size_in_bytes (TREE_TYPE (exp));
3162
3163 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
3164 if (value)
3165 hi = hi * 603 + const_hash_1 (value);
3166
3167 return hi;
3168 }
3169
3170 case ADDR_EXPR:
3171 if (CONSTANT_CLASS_P (TREE_OPERAND (exp, 0)))
3172 return const_hash_1 (TREE_OPERAND (exp, 0));
3173
3174 /* Fallthru. */
3175 case FDESC_EXPR:
3176 {
3177 class addr_const value;
3178
3179 decode_addr_const (exp, &value);
3180 switch (GET_CODE (value.base))
3181 {
3182 case SYMBOL_REF:
3183 /* Don't hash the address of the SYMBOL_REF;
3184 only use the offset and the symbol name. */
3185 hi = value.offset.coeffs[0];
3186 p = XSTR (value.base, 0);
3187 for (i = 0; p[i] != 0; i++)
3188 hi = ((hi * 613) + (unsigned) (p[i]));
3189 break;
3190
3191 case LABEL_REF:
3192 hi = (value.offset.coeffs[0]
3193 + CODE_LABEL_NUMBER (label_ref_label (value.base)) * 13);
3194 break;
3195
3196 default:
3197 gcc_unreachable ();
3198 }
3199 }
3200 return hi;
3201
3202 case PLUS_EXPR:
3203 case POINTER_PLUS_EXPR:
3204 case MINUS_EXPR:
3205 return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9
3206 + const_hash_1 (TREE_OPERAND (exp, 1)));
3207
3208 CASE_CONVERT:
3209 return const_hash_1 (TREE_OPERAND (exp, 0)) * 7 + 2;
3210
3211 default:
3212 /* A language-specific constant. Just hash the code. */
3213 return code;
3214 }
3215
3216 /* Compute hashing function. */
3217 hi = len;
3218 for (i = 0; i < len; i++)
3219 hi = ((hi * 613) + (unsigned) (p[i]));
3220
3221 return hi;
3222 }
3223
3224 /* Wrapper of compare_constant, for the htab interface. */
3225 bool
3226 tree_descriptor_hasher::equal (constant_descriptor_tree *c1,
3227 constant_descriptor_tree *c2)
3228 {
3229 if (c1->hash != c2->hash)
3230 return 0;
3231 return compare_constant (c1->value, c2->value);
3232 }
3233
3234 /* Compare t1 and t2, and return 1 only if they are known to result in
3235 the same bit pattern on output. */
3236
3237 static int
3238 compare_constant (const tree t1, const tree t2)
3239 {
3240 enum tree_code typecode;
3241
3242 if (t1 == NULL_TREE)
3243 return t2 == NULL_TREE;
3244 if (t2 == NULL_TREE)
3245 return 0;
3246
3247 if (TREE_CODE (t1) != TREE_CODE (t2))
3248 return 0;
3249
3250 switch (TREE_CODE (t1))
3251 {
3252 case INTEGER_CST:
3253 /* Integer constants are the same only if they have the same width of type. */
3254 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3255 return 0;
3256 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
3257 return 0;
3258 return tree_int_cst_equal (t1, t2);
3259
3260 case REAL_CST:
3261 /* Real constants are the same only if they have the same width of type. In
3262 addition to the same width, we need to check whether the modes are the
3263 same. There might be two floating point modes that are the same size
3264 but have different representations, such as the PowerPC that has 2
3265 different 128-bit floating point types (IBM extended double and IEEE
3266 128-bit floating point). */
3267 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3268 return 0;
3269 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
3270 return 0;
3271 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
3272
3273 case FIXED_CST:
3274 /* Fixed constants are the same only if they have the same width of type. */
3275 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3276 return 0;
3277
3278 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
3279
3280 case STRING_CST:
3281 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
3282 || int_size_in_bytes (TREE_TYPE (t1))
3283 != int_size_in_bytes (TREE_TYPE (t2)))
3284 return 0;
3285
3286 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
3287 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
3288 TREE_STRING_LENGTH (t1)));
3289
3290 case COMPLEX_CST:
3291 return (compare_constant (TREE_REALPART (t1), TREE_REALPART (t2))
3292 && compare_constant (TREE_IMAGPART (t1), TREE_IMAGPART (t2)));
3293
3294 case VECTOR_CST:
3295 {
3296 if (VECTOR_CST_NPATTERNS (t1)
3297 != VECTOR_CST_NPATTERNS (t2))
3298 return 0;
3299
3300 if (VECTOR_CST_NELTS_PER_PATTERN (t1)
3301 != VECTOR_CST_NELTS_PER_PATTERN (t2))
3302 return 0;
3303
3304 unsigned int count = vector_cst_encoded_nelts (t1);
3305 for (unsigned int i = 0; i < count; ++i)
3306 if (!compare_constant (VECTOR_CST_ENCODED_ELT (t1, i),
3307 VECTOR_CST_ENCODED_ELT (t2, i)))
3308 return 0;
3309
3310 return 1;
3311 }
3312
3313 case CONSTRUCTOR:
3314 {
3315 vec<constructor_elt, va_gc> *v1, *v2;
3316 unsigned HOST_WIDE_INT idx;
3317
3318 typecode = TREE_CODE (TREE_TYPE (t1));
3319 if (typecode != TREE_CODE (TREE_TYPE (t2)))
3320 return 0;
3321
3322 if (typecode == ARRAY_TYPE)
3323 {
3324 HOST_WIDE_INT size_1 = int_size_in_bytes (TREE_TYPE (t1));
3325 /* For arrays, check that mode, size and storage order match. */
3326 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
3327 || size_1 == -1
3328 || size_1 != int_size_in_bytes (TREE_TYPE (t2))
3329 || TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t1))
3330 != TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t2)))
3331 return 0;
3332 }
3333 else
3334 {
3335 /* For record and union constructors, require exact type
3336 equality. */
3337 if (TREE_TYPE (t1) != TREE_TYPE (t2))
3338 return 0;
3339 }
3340
3341 v1 = CONSTRUCTOR_ELTS (t1);
3342 v2 = CONSTRUCTOR_ELTS (t2);
3343 if (vec_safe_length (v1) != vec_safe_length (v2))
3344 return 0;
3345
3346 for (idx = 0; idx < vec_safe_length (v1); ++idx)
3347 {
3348 constructor_elt *c1 = &(*v1)[idx];
3349 constructor_elt *c2 = &(*v2)[idx];
3350
3351 /* Check that each value is the same... */
3352 if (!compare_constant (c1->value, c2->value))
3353 return 0;
3354 /* ... and that they apply to the same fields! */
3355 if (typecode == ARRAY_TYPE)
3356 {
3357 if (!compare_constant (c1->index, c2->index))
3358 return 0;
3359 }
3360 else
3361 {
3362 if (c1->index != c2->index)
3363 return 0;
3364 }
3365 }
3366
3367 return 1;
3368 }
3369
3370 case ADDR_EXPR:
3371 case FDESC_EXPR:
3372 {
3373 class addr_const value1, value2;
3374 enum rtx_code code;
3375 int ret;
3376
3377 decode_addr_const (t1, &value1);
3378 decode_addr_const (t2, &value2);
3379
3380 if (maybe_ne (value1.offset, value2.offset))
3381 return 0;
3382
3383 code = GET_CODE (value1.base);
3384 if (code != GET_CODE (value2.base))
3385 return 0;
3386
3387 switch (code)
3388 {
3389 case SYMBOL_REF:
3390 ret = (strcmp (XSTR (value1.base, 0), XSTR (value2.base, 0)) == 0);
3391 break;
3392
3393 case LABEL_REF:
3394 ret = (CODE_LABEL_NUMBER (label_ref_label (value1.base))
3395 == CODE_LABEL_NUMBER (label_ref_label (value2.base)));
3396 break;
3397
3398 default:
3399 gcc_unreachable ();
3400 }
3401 return ret;
3402 }
3403
3404 case PLUS_EXPR:
3405 case POINTER_PLUS_EXPR:
3406 case MINUS_EXPR:
3407 case RANGE_EXPR:
3408 return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
3409 && compare_constant (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));
3410
3411 CASE_CONVERT:
3412 case VIEW_CONVERT_EXPR:
3413 return compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
3414
3415 default:
3416 return 0;
3417 }
3418 }
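
/* Note on strictness (illustrative, not from the sources above): the
   comparison is deliberately stricter than plain value equality because
   the two trees must be interchangeable as memory images.  Two REAL_CSTs
   compare equal only if their types have the same mode and the
   representations are bit-identical, so 0.0 and -0.0 are distinct; two
   STRING_CSTs must also agree in mode and object size, so "ab" stored in
   a char[2] and "ab" stored in a char[3] are distinct constants.  */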
3419
3420 /* Return the section into which constant EXP should be placed. */
3421
3422 static section *
3423 get_constant_section (tree exp, unsigned int align)
3424 {
3425 return targetm.asm_out.select_section (exp,
3426 compute_reloc_for_constant (exp),
3427 align);
3428 }
3429
3430 /* Return the size of constant EXP in bytes. */
3431
3432 static HOST_WIDE_INT
3433 get_constant_size (tree exp)
3434 {
3435 HOST_WIDE_INT size;
3436
3437 size = int_size_in_bytes (TREE_TYPE (exp));
3438 gcc_checking_assert (size >= 0);
3439 gcc_checking_assert (TREE_CODE (exp) != STRING_CST
3440 || size >= TREE_STRING_LENGTH (exp));
3441 return size;
3442 }
3443
3444 /* Subroutine of output_constant_def:
3445 No constant equal to EXP is known to have been output.
3446 Make a constant descriptor to enter EXP in the hash table.
3447 Assign the label number and construct RTL to refer to the
3448 constant's location in memory.
3449 Caller is responsible for updating the hash table. */
3450
3451 static struct constant_descriptor_tree *
3452 build_constant_desc (tree exp)
3453 {
3454 struct constant_descriptor_tree *desc;
3455 rtx symbol, rtl;
3456 char label[256];
3457 int labelno;
3458 tree decl;
3459
3460 desc = ggc_alloc<constant_descriptor_tree> ();
3461 desc->value = exp;
3462
3463 /* Create a string containing the label name, in LABEL. */
3464 labelno = const_labelno++;
3465 ASM_GENERATE_INTERNAL_LABEL (label, "LC", labelno);
3466
3467 /* Construct the VAR_DECL associated with the constant. */
3468 decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (label),
3469 TREE_TYPE (exp));
3470 DECL_ARTIFICIAL (decl) = 1;
3471 DECL_IGNORED_P (decl) = 1;
3472 TREE_READONLY (decl) = 1;
3473 TREE_STATIC (decl) = 1;
3474 TREE_ADDRESSABLE (decl) = 1;
3475 /* We don't set the RTL yet as this would cause varpool to assume that the
3476 variable is referenced. Moreover, it would just be dropped in LTO mode.
3477 Instead we set the flag that will be recognized in make_decl_rtl. */
3478 DECL_IN_CONSTANT_POOL (decl) = 1;
3479 DECL_INITIAL (decl) = desc->value;
3480 /* ??? targetm.constant_alignment hasn't been updated for vector types on
3481 most architectures so use DATA_ALIGNMENT as well, except for strings. */
3482 if (TREE_CODE (exp) == STRING_CST)
3483 SET_DECL_ALIGN (decl, targetm.constant_alignment (exp, DECL_ALIGN (decl)));
3484 else
3485 {
3486 align_variable (decl, 0);
3487 if (DECL_ALIGN (decl) < GET_MODE_ALIGNMENT (DECL_MODE (decl))
3488 && ((optab_handler (movmisalign_optab, DECL_MODE (decl))
3489 != CODE_FOR_nothing)
3490 || targetm.slow_unaligned_access (DECL_MODE (decl),
3491 DECL_ALIGN (decl))))
3492 SET_DECL_ALIGN (decl, GET_MODE_ALIGNMENT (DECL_MODE (decl)));
3493 }
3494
3495 /* Now construct the SYMBOL_REF and the MEM. */
3496 if (use_object_blocks_p ())
3497 {
3498 int align = (TREE_CODE (decl) == CONST_DECL
3499 || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
3500 ? DECL_ALIGN (decl)
3501 : symtab_node::get (decl)->definition_alignment ());
3502 section *sect = get_constant_section (exp, align);
3503 symbol = create_block_symbol (ggc_strdup (label),
3504 get_block_for_section (sect), -1);
3505 }
3506 else
3507 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
3508 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
3509 SET_SYMBOL_REF_DECL (symbol, decl);
3510 TREE_CONSTANT_POOL_ADDRESS_P (symbol) = 1;
3511
3512 rtl = gen_const_mem (TYPE_MODE (TREE_TYPE (exp)), symbol);
3513 set_mem_alias_set (rtl, 0);
3514
3515 /* Putting EXP into the literal pool might have imposed a different
3516 alignment which should be visible in the RTX as well. */
3517 set_mem_align (rtl, DECL_ALIGN (decl));
3518
3519 /* We cannot share RTX'es in pool entries.
3520 Mark this piece of RTL as required for unsharing. */
3521 RTX_FLAG (rtl, used) = 1;
3522
3523 /* Set flags or add text to the name to record information, such as
3524 that it is a local symbol. If the name is changed, the macro
3525 ASM_OUTPUT_LABELREF will have to know how to strip this
3526 information. This call might invalidate our local variable
3527 SYMBOL; we can't use it afterward. */
3528 targetm.encode_section_info (exp, rtl, true);
3529
3530 desc->rtl = rtl;
3531
3532 return desc;
3533 }
3534
3535 /* Subroutine of output_constant_def and tree_output_constant_def:
3536 Add a constant to the hash table that tracks which constants
3537 already have labels. */
3538
3539 static constant_descriptor_tree *
3540 add_constant_to_table (tree exp, int defer)
3541 {
3542 /* The hash table methods may call output_constant_def for addressed
3543 constants, so handle them first. */
3544 output_addressed_constants (exp, defer);
3545
3546 /* Sanity check to catch recursive insertion. */
3547 static bool inserting;
3548 gcc_assert (!inserting);
3549 inserting = true;
3550
3551 /* Look up EXP in the table of constant descriptors. If we didn't
3552 find it, create a new one. */
3553 struct constant_descriptor_tree key;
3554 key.value = exp;
3555 key.hash = const_hash_1 (exp);
3556 constant_descriptor_tree **loc
3557 = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT);
3558
3559 inserting = false;
3560
3561 struct constant_descriptor_tree *desc = *loc;
3562 if (!desc)
3563 {
3564 desc = build_constant_desc (exp);
3565 desc->hash = key.hash;
3566 *loc = desc;
3567 }
3568
3569 return desc;
3570 }
3571
3572 /* Return an rtx representing a reference to constant data in memory
3573 for the constant expression EXP.
3574
3575 If assembler code for such a constant has already been output,
3576 return an rtx to refer to it.
3577 Otherwise, output such a constant in memory
3578 and generate an rtx for it.
3579
3580 If DEFER is nonzero, this constant can be deferred and output only
3581 if referenced in the function after all optimizations.
3582
3583 `const_desc_table' records which constants already have label strings. */
3584
3585 rtx
3586 output_constant_def (tree exp, int defer)
3587 {
3588 struct constant_descriptor_tree *desc = add_constant_to_table (exp, defer);
3589 maybe_output_constant_def_contents (desc, defer);
3590 return desc->rtl;
3591 }
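
/* A sketch of the effect (assuming a typical ELF target and the default
   -fmerge-constants; the label name is illustrative): because
   add_constant_to_table hashes on the constant's value, structurally
   identical constants share one descriptor and hence one label, so

     const char *f (void) { return "shared"; }
     const char *g (void) { return "shared"; }

   can both end up referring to a single pooled string such as .LC0
   rather than to two separate copies.  */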
3592
3593 /* Subroutine of output_constant_def: Decide whether or not we need to
3594 output the constant DESC now, and if so, do it. */
3595 static void
3596 maybe_output_constant_def_contents (struct constant_descriptor_tree *desc,
3597 int defer)
3598 {
3599 rtx symbol = XEXP (desc->rtl, 0);
3600 tree exp = desc->value;
3601
3602 if (flag_syntax_only)
3603 return;
3604
3605 if (TREE_ASM_WRITTEN (exp))
3606 /* Already output; don't do it again. */
3607 return;
3608
3609 /* We can always defer constants as long as the context allows
3610 doing so. */
3611 if (defer)
3612 {
3613 /* Increment n_deferred_constants if we are inside a function; it needs to be at
3614 least as large as the number of constants actually referred to
3615 by the function. If it's too small we'll stop looking too early
3616 and fail to emit constants; if it's too large we'll only look
3617 through the entire function when we could have stopped earlier. */
3618 if (cfun)
3619 n_deferred_constants++;
3620 return;
3621 }
3622
3623 output_constant_def_contents (symbol);
3624 }
3625
3626 /* Subroutine of output_constant_def_contents. Output the definition
3627 of constant EXP, which is pointed to by label LABEL. ALIGN is the
3628 constant's alignment in bits. MERGE_STRINGS is true if EXP is being placed in a mergeable string section. */
3629
3630 static void
3631 assemble_constant_contents (tree exp, const char *label, unsigned int align,
3632 bool merge_strings)
3633 {
3634 HOST_WIDE_INT size;
3635
3636 size = get_constant_size (exp);
3637
3638 /* Do any machine/system dependent processing of the constant. */
3639 targetm.asm_out.declare_constant_name (asm_out_file, label, exp, size);
3640
3641 /* Output the value of EXP. */
3642 output_constant (exp, size, align, false, merge_strings);
3643
3644 targetm.asm_out.decl_end ();
3645 }
3646
3647 /* We must output the constant data referred to by SYMBOL; do so. */
3648
3649 static void
3650 output_constant_def_contents (rtx symbol)
3651 {
3652 tree decl = SYMBOL_REF_DECL (symbol);
3653 tree exp = DECL_INITIAL (decl);
3654 bool asan_protected = false;
3655
3656 /* Make sure any other constants whose addresses appear in EXP
3657 are assigned label numbers. */
3658 output_addressed_constants (exp, 0);
3659
3660 /* We are no longer deferring this constant. */
3661 TREE_ASM_WRITTEN (decl) = TREE_ASM_WRITTEN (exp) = 1;
3662
3663 if ((flag_sanitize & SANITIZE_ADDRESS)
3664 && TREE_CODE (exp) == STRING_CST
3665 && asan_protect_global (exp))
3666 {
3667 asan_protected = true;
3668 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
3669 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
3670 }
3671
3672 /* If the constant is part of an object block, make sure that the
3673 decl has been positioned within its block, but do not write out
3674 its definition yet. output_object_blocks will do that later. */
3675 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
3676 place_block_symbol (symbol);
3677 else
3678 {
3679 int align = (TREE_CODE (decl) == CONST_DECL
3680 || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
3681 ? DECL_ALIGN (decl)
3682 : symtab_node::get (decl)->definition_alignment ());
3683 section *sect = get_constant_section (exp, align);
3684 switch_to_section (sect);
3685 if (align > BITS_PER_UNIT)
3686 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
3687 assemble_constant_contents (exp, XSTR (symbol, 0), align,
3688 (sect->common.flags & SECTION_MERGE)
3689 && (sect->common.flags & SECTION_STRINGS));
3690 if (asan_protected)
3691 {
3692 HOST_WIDE_INT size = get_constant_size (exp);
3693 assemble_zeros (asan_red_zone_size (size));
3694 }
3695 }
3696 }
3697
3698 /* Look up EXP in the table of constant descriptors. Return the rtl
3699 if it has been emitted, else null. */
3700
3701 rtx
3702 lookup_constant_def (tree exp)
3703 {
3704 struct constant_descriptor_tree key;
3705
3706 key.value = exp;
3707 key.hash = const_hash_1 (exp);
3708 constant_descriptor_tree *desc
3709 = const_desc_htab->find_with_hash (&key, key.hash);
3710
3711 return (desc ? desc->rtl : NULL_RTX);
3712 }
3713
3714 /* Return a tree representing a reference to constant data in memory
3715 for the constant expression EXP.
3716
3717 This is the counterpart of output_constant_def at the Tree level. */
3718
3719 tree
3720 tree_output_constant_def (tree exp)
3721 {
3722 struct constant_descriptor_tree *desc = add_constant_to_table (exp, 1);
3723 tree decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0));
3724 varpool_node::finalize_decl (decl);
3725 return decl;
3726 }
3727
3728 class GTY((chain_next ("%h.next"), for_user)) constant_descriptor_rtx {
3729 public:
3730 class constant_descriptor_rtx *next;
3731 rtx mem;
3732 rtx sym;
3733 rtx constant;
3734 HOST_WIDE_INT offset;
3735 hashval_t hash;
3736 fixed_size_mode mode;
3737 unsigned int align;
3738 int labelno;
3739 int mark;
3740 };
3741
3742 struct const_rtx_desc_hasher : ggc_ptr_hash<constant_descriptor_rtx>
3743 {
3744 static hashval_t hash (constant_descriptor_rtx *);
3745 static bool equal (constant_descriptor_rtx *, constant_descriptor_rtx *);
3746 };
3747
3748 /* Used in the hash tables to avoid outputting the same constant
3749 twice. Unlike 'struct constant_descriptor_tree', RTX constants
3750 are output once per function, not once per file. */
3751 /* ??? Only a few targets need per-function constant pools. Most
3752 can use one per-file pool. Should add a targetm bit to tell the
3753 difference. */
3754
3755 struct GTY(()) rtx_constant_pool {
3756 /* Pointers to first and last constant in pool, as ordered by offset. */
3757 class constant_descriptor_rtx *first;
3758 class constant_descriptor_rtx *last;
3759
3760 /* Hash facility for making memory-constants from constant rtl-expressions.
3761 It is used on RISC machines where immediate integer arguments and
3762 constant addresses are restricted so that such constants must be stored
3763 in memory. */
3764 hash_table<const_rtx_desc_hasher> *const_rtx_htab;
3765
3766 /* Current offset in constant pool (does not include any
3767 machine-specific header). */
3768 HOST_WIDE_INT offset;
3769 };
3770
3771 /* Hash and compare functions for const_rtx_htab. */
3772
3773 hashval_t
3774 const_rtx_desc_hasher::hash (constant_descriptor_rtx *desc)
3775 {
3776 return desc->hash;
3777 }
3778
3779 bool
3780 const_rtx_desc_hasher::equal (constant_descriptor_rtx *x,
3781 constant_descriptor_rtx *y)
3782 {
3783 if (x->mode != y->mode)
3784 return 0;
3785 return rtx_equal_p (x->constant, y->constant);
3786 }
3787
3788 /* Hash one component of a constant. */
3789
3790 static hashval_t
3791 const_rtx_hash_1 (const_rtx x)
3792 {
3793 unsigned HOST_WIDE_INT hwi;
3794 machine_mode mode;
3795 enum rtx_code code;
3796 hashval_t h;
3797 int i;
3798
3799 code = GET_CODE (x);
3800 mode = GET_MODE (x);
3801 h = (hashval_t) code * 1048573 + mode;
3802
3803 switch (code)
3804 {
3805 case CONST_INT:
3806 hwi = INTVAL (x);
3807
3808 fold_hwi:
3809 {
3810 int shift = sizeof (hashval_t) * CHAR_BIT;
3811 const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t);
3812
3813 h ^= (hashval_t) hwi;
3814 for (i = 1; i < n; ++i)
3815 {
3816 hwi >>= shift;
3817 h ^= (hashval_t) hwi;
3818 }
3819 }
3820 break;
3821
3822 case CONST_WIDE_INT:
3823 hwi = 0;
3824 {
3825 for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
3826 hwi ^= CONST_WIDE_INT_ELT (x, i);
3827 goto fold_hwi;
3828 }
3829
3830 case CONST_DOUBLE:
3831 if (TARGET_SUPPORTS_WIDE_INT == 0 && mode == VOIDmode)
3832 {
3833 hwi = CONST_DOUBLE_LOW (x) ^ CONST_DOUBLE_HIGH (x);
3834 goto fold_hwi;
3835 }
3836 else
3837 h ^= real_hash (CONST_DOUBLE_REAL_VALUE (x));
3838 break;
3839
3840 case CONST_FIXED:
3841 h ^= fixed_hash (CONST_FIXED_VALUE (x));
3842 break;
3843
3844 case SYMBOL_REF:
3845 h ^= htab_hash_string (XSTR (x, 0));
3846 break;
3847
3848 case LABEL_REF:
3849 h = h * 251 + CODE_LABEL_NUMBER (label_ref_label (x));
3850 break;
3851
3852 case UNSPEC:
3853 case UNSPEC_VOLATILE:
3854 h = h * 251 + XINT (x, 1);
3855 break;
3856
3857 default:
3858 break;
3859 }
3860
3861 return h;
3862 }
3863
3864 /* Compute a hash value for X, which should be a constant. */
3865
3866 static hashval_t
3867 const_rtx_hash (rtx x)
3868 {
3869 hashval_t h = 0;
3870 subrtx_iterator::array_type array;
3871 FOR_EACH_SUBRTX (iter, array, x, ALL)
3872 h = h * 509 + const_rtx_hash_1 (*iter);
3873 return h;
3874 }
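
/* A standalone sketch of the two hashing ideas above, outside the rtl
   machinery (names below are illustrative, not GCC APIs): a wide value
   is folded into a narrower hash by XORing its halves, as in fold_hwi,
   and per-component hashes are combined polynomially with the
   multiplier 509, as in const_rtx_hash.  */
#if 0
#include <stdint.h>
#include <stddef.h>

/* Fold a 64-bit value into a 32-bit hash by XORing its two halves.  */
static uint32_t
fold_u64 (uint64_t v)
{
  return (uint32_t) v ^ (uint32_t) (v >> 32);
}

/* Combine N component values into one hash, mirroring
   h = h * 509 + const_rtx_hash_1 (*iter).  */
static uint32_t
combine_hashes (const uint64_t *components, size_t n)
{
  uint32_t h = 0;
  for (size_t i = 0; i < n; ++i)
    h = h * 509 + fold_u64 (components[i]);
  return h;
}
#endif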
3875
3876
3877 /* Create and return a new rtx constant pool. */
3878
3879 static struct rtx_constant_pool *
3880 create_constant_pool (void)
3881 {
3882 struct rtx_constant_pool *pool;
3883
3884 pool = ggc_alloc<rtx_constant_pool> ();
3885 pool->const_rtx_htab = hash_table<const_rtx_desc_hasher>::create_ggc (31);
3886 pool->first = NULL;
3887 pool->last = NULL;
3888 pool->offset = 0;
3889 return pool;
3890 }
3891
3892 /* Initialize constant pool hashing for a new function. */
3893
3894 void
3895 init_varasm_status (void)
3896 {
3897 crtl->varasm.pool = create_constant_pool ();
3898 crtl->varasm.deferred_constants = 0;
3899 }
3900
3901 /* Given a MINUS expression, simplify it if both sides
3902 include the same symbol. */
3903
3904 rtx
3905 simplify_subtraction (rtx x)
3906 {
3907 rtx r = simplify_rtx (x);
3908 return r ? r : x;
3909 }
3910
3911 /* Given a constant rtx X, make (or find) a memory constant for its value
3912 and return a MEM rtx to refer to it in memory. IN_MODE is the mode
3913 of X. */
3914
3915 rtx
3916 force_const_mem (machine_mode in_mode, rtx x)
3917 {
3918 class constant_descriptor_rtx *desc, tmp;
3919 struct rtx_constant_pool *pool;
3920 char label[256];
3921 rtx def, symbol;
3922 hashval_t hash;
3923 unsigned int align;
3924 constant_descriptor_rtx **slot;
3925 fixed_size_mode mode;
3926
3927 /* We can't force variable-sized objects to memory. */
3928 if (!is_a <fixed_size_mode> (in_mode, &mode))
3929 return NULL_RTX;
3930
3931 /* If we're not allowed to drop X into the constant pool, don't. */
3932 if (targetm.cannot_force_const_mem (mode, x))
3933 return NULL_RTX;
3934
3935 /* Record that this function has used a constant pool entry. */
3936 crtl->uses_const_pool = 1;
3937
3938 /* Decide which pool to use. */
3939 pool = (targetm.use_blocks_for_constant_p (mode, x)
3940 ? shared_constant_pool
3941 : crtl->varasm.pool);
3942
3943 /* Lookup the value in the hashtable. */
3944 tmp.constant = x;
3945 tmp.mode = mode;
3946 hash = const_rtx_hash (x);
3947 slot = pool->const_rtx_htab->find_slot_with_hash (&tmp, hash, INSERT);
3948 desc = *slot;
3949
3950 /* If the constant was already present, return its memory. */
3951 if (desc)
3952 return copy_rtx (desc->mem);
3953
3954 /* Otherwise, create a new descriptor. */
3955 desc = ggc_alloc<constant_descriptor_rtx> ();
3956 *slot = desc;
3957
3958 /* Align the location counter as required by EXP's data type. */
3959 machine_mode align_mode = (mode == VOIDmode ? word_mode : mode);
3960 align = targetm.static_rtx_alignment (align_mode);
3961
3962 pool->offset += (align / BITS_PER_UNIT) - 1;
3963 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
3964
3965 desc->next = NULL;
3966 desc->constant = copy_rtx (tmp.constant);
3967 desc->offset = pool->offset;
3968 desc->hash = hash;
3969 desc->mode = mode;
3970 desc->align = align;
3971 desc->labelno = const_labelno;
3972 desc->mark = 0;
3973
3974 pool->offset += GET_MODE_SIZE (mode);
3975 if (pool->last)
3976 pool->last->next = desc;
3977 else
3978 pool->first = pool->last = desc;
3979 pool->last = desc;
3980
3981 /* Create a string containing the label name, in LABEL. */
3982 ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno);
3983 ++const_labelno;
3984
3985 /* Construct the SYMBOL_REF. Make sure to mark it as belonging to
3986 the constants pool. */
3987 if (use_object_blocks_p () && targetm.use_blocks_for_constant_p (mode, x))
3988 {
3989 section *sect = targetm.asm_out.select_rtx_section (mode, x, align);
3990 symbol = create_block_symbol (ggc_strdup (label),
3991 get_block_for_section (sect), -1);
3992 }
3993 else
3994 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
3995 desc->sym = symbol;
3996 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
3997 CONSTANT_POOL_ADDRESS_P (symbol) = 1;
3998 SET_SYMBOL_REF_CONSTANT (symbol, desc);
3999
4000 /* Construct the MEM. */
4001 desc->mem = def = gen_const_mem (mode, symbol);
4002 set_mem_align (def, align);
4003
4004 /* If we're dropping a label to the constant pool, make sure we
4005 don't delete it. */
4006 if (GET_CODE (x) == LABEL_REF)
4007 LABEL_PRESERVE_P (XEXP (x, 0)) = 1;
4008
4009 return copy_rtx (def);
4010 }
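
/* The offset bookkeeping above uses the usual add-then-mask idiom to
   round pool->offset up to the entry's byte alignment before recording
   the entry's position.  A standalone sketch (illustrative names):  */
#if 0
#include <stdint.h>

/* Round OFFSET up to a multiple of ALIGN, which must be a power of 2.  */
static uint64_t
align_up (uint64_t offset, uint64_t align)
{
  offset += align - 1;
  offset &= ~(align - 1);
  return offset;
}

/* Example: an 8-byte, 8-byte-aligned entry appended to a pool that is
   currently 13 bytes long is placed at offset align_up (13, 8) == 16,
   leaving the pool 24 bytes long.  */
#endif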
4011
4012 /* Given a constant pool SYMBOL_REF, return the corresponding constant. */
4013
4014 rtx
4015 get_pool_constant (const_rtx addr)
4016 {
4017 return SYMBOL_REF_CONSTANT (addr)->constant;
4018 }
4019
4020 /* Given a constant pool SYMBOL_REF, return the corresponding constant
4021 and whether it has been output or not. */
4022
4023 rtx
4024 get_pool_constant_mark (rtx addr, bool *pmarked)
4025 {
4026 class constant_descriptor_rtx *desc;
4027
4028 desc = SYMBOL_REF_CONSTANT (addr);
4029 *pmarked = (desc->mark != 0);
4030 return desc->constant;
4031 }
4032
4033 /* Similar, return the mode. */
4034
4035 fixed_size_mode
4036 get_pool_mode (const_rtx addr)
4037 {
4038 return SYMBOL_REF_CONSTANT (addr)->mode;
4039 }
4040
4041 /* Return TRUE if and only if the constant pool has no entries. Note
4042 that even entries we might end up choosing not to emit are counted
4043 here, so there is the potential for missed optimizations. */
4044
4045 bool
4046 constant_pool_empty_p (void)
4047 {
4048 return crtl->varasm.pool->first == NULL;
4049 }
4050
4051 /* Worker function for output_constant_pool_1. Emit assembly for X
4052 in MODE with known alignment ALIGN. */
4053
4054 static void
4055 output_constant_pool_2 (fixed_size_mode mode, rtx x, unsigned int align)
4056 {
4057 switch (GET_MODE_CLASS (mode))
4058 {
4059 case MODE_FLOAT:
4060 case MODE_DECIMAL_FLOAT:
4061 {
4062 gcc_assert (CONST_DOUBLE_AS_FLOAT_P (x));
4063 assemble_real (*CONST_DOUBLE_REAL_VALUE (x),
4064 as_a <scalar_float_mode> (mode), align, false);
4065 break;
4066 }
4067
4068 case MODE_INT:
4069 case MODE_PARTIAL_INT:
4070 case MODE_FRACT:
4071 case MODE_UFRACT:
4072 case MODE_ACCUM:
4073 case MODE_UACCUM:
4074 assemble_integer (x, GET_MODE_SIZE (mode), align, 1);
4075 break;
4076
4077 case MODE_VECTOR_BOOL:
4078 {
4079 gcc_assert (GET_CODE (x) == CONST_VECTOR);
4080
4081 /* Pick the smallest integer mode that contains at least one
4082 whole element. Often this is byte_mode and contains more
4083 than one element. */
4084 unsigned int nelts = GET_MODE_NUNITS (mode);
4085 unsigned int elt_bits = GET_MODE_BITSIZE (mode) / nelts;
4086 unsigned int int_bits = MAX (elt_bits, BITS_PER_UNIT);
4087 scalar_int_mode int_mode = int_mode_for_size (int_bits, 0).require ();
4088 unsigned int mask = GET_MODE_MASK (GET_MODE_INNER (mode));
4089
4090 /* Build the constant up one integer at a time. */
4091 unsigned int elts_per_int = int_bits / elt_bits;
4092 for (unsigned int i = 0; i < nelts; i += elts_per_int)
4093 {
4094 unsigned HOST_WIDE_INT value = 0;
4095 unsigned int limit = MIN (nelts - i, elts_per_int);
4096 for (unsigned int j = 0; j < limit; ++j)
4097 {
4098 auto elt = INTVAL (CONST_VECTOR_ELT (x, i + j));
4099 value |= (elt & mask) << (j * elt_bits);
4100 }
4101 output_constant_pool_2 (int_mode, gen_int_mode (value, int_mode),
4102 i != 0 ? MIN (align, int_bits) : align);
4103 }
4104 break;
4105 }
4106 case MODE_VECTOR_FLOAT:
4107 case MODE_VECTOR_INT:
4108 case MODE_VECTOR_FRACT:
4109 case MODE_VECTOR_UFRACT:
4110 case MODE_VECTOR_ACCUM:
4111 case MODE_VECTOR_UACCUM:
4112 {
4113 int i, units;
4114 scalar_mode submode = GET_MODE_INNER (mode);
4115 unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode));
4116
4117 gcc_assert (GET_CODE (x) == CONST_VECTOR);
4118 units = GET_MODE_NUNITS (mode);
4119
4120 for (i = 0; i < units; i++)
4121 {
4122 rtx elt = CONST_VECTOR_ELT (x, i);
4123 output_constant_pool_2 (submode, elt, i ? subalign : align);
4124 }
4125 }
4126 break;
4127
4128 default:
4129 gcc_unreachable ();
4130 }
4131 }
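
/* The MODE_VECTOR_BOOL case above packs several narrow boolean elements
   into one integer before emitting it.  A simplified standalone sketch
   for 1-bit elements packed into bytes (illustrative names; the code
   above handles arbitrary element widths and uses the target's integer
   modes):  */
#if 0
#include <stdint.h>
#include <stddef.h>

/* Pack NELTS single-bit elements, given as 0/1 values in ELTS, into
   bytes least-significant-bit first, as in
   value |= (elt & mask) << (j * elt_bits).  OUT must provide
   (NELTS + 7) / 8 bytes.  */
static void
pack_bool_vector (const uint8_t *elts, size_t nelts, uint8_t *out)
{
  for (size_t i = 0; i < nelts; i += 8)
    {
      uint8_t value = 0;
      size_t limit = nelts - i < 8 ? nelts - i : 8;
      for (size_t j = 0; j < limit; ++j)
        value |= (uint8_t) ((elts[i + j] & 1) << j);
      out[i / 8] = value;
    }
}
#endif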
4132
4133 /* Worker function for output_constant_pool. Emit constant DESC,
4134 giving it ALIGN bits of alignment. */
4135
4136 static void
4137 output_constant_pool_1 (class constant_descriptor_rtx *desc,
4138 unsigned int align)
4139 {
4140 rtx x, tmp;
4141
4142 x = desc->constant;
4143
4144 /* See if X is a LABEL_REF (or a CONST referring to a LABEL_REF)
4145 whose CODE_LABEL has been deleted. This can occur if a jump table
4146 is eliminated by optimization. If so, write a constant of zero
4147 instead. Note that this can also happen by turning the
4148 CODE_LABEL into a NOTE. */
4149 /* ??? This seems completely and utterly wrong. Certainly it's
4150 not true for NOTE_INSN_DELETED_LABEL, but I doubt it functions
4151 properly even with rtx_insn::deleted and friends. */
4152
4153 tmp = x;
4154 switch (GET_CODE (tmp))
4155 {
4156 case CONST:
4157 if (GET_CODE (XEXP (tmp, 0)) != PLUS
4158 || GET_CODE (XEXP (XEXP (tmp, 0), 0)) != LABEL_REF)
4159 break;
4160 tmp = XEXP (XEXP (tmp, 0), 0);
4161 /* FALLTHRU */
4162
4163 case LABEL_REF:
4164 {
4165 rtx_insn *insn = label_ref_label (tmp);
4166 gcc_assert (!insn->deleted ());
4167 gcc_assert (!NOTE_P (insn)
4168 || NOTE_KIND (insn) != NOTE_INSN_DELETED);
4169 break;
4170 }
4171
4172 default:
4173 break;
4174 }
4175
4176 #ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
4177 ASM_OUTPUT_SPECIAL_POOL_ENTRY (asm_out_file, x, desc->mode,
4178 align, desc->labelno, done);
4179 #endif
4180
4181 assemble_align (align);
4182
4183 /* Output the label. */
4184 targetm.asm_out.internal_label (asm_out_file, "LC", desc->labelno);
4185
4186 /* Output the data.
4187 Pass the actual alignment value while emitting the constant to the asm
4188 code; passing the alignment explicitly as 1, on the assumption that the
4189 data is already aligned, prevents the generation of fix-up table
4190 entries. */
4191 output_constant_pool_2 (desc->mode, x, desc->align);
4192
4193 /* Make sure all constants in SECTION_MERGE and not SECTION_STRINGS
4194 sections have proper size. */
4195 if (align > GET_MODE_BITSIZE (desc->mode)
4196 && in_section
4197 && (in_section->common.flags & SECTION_MERGE))
4198 assemble_align (align);
4199
4200 #ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
4201 done:
4202 #endif
4203 return;
4204 }
4205
4206 /* Recompute the offsets of entries in POOL, and the overall size of
4207 POOL. Do this after calling mark_constant_pool to ensure that we
4208 are computing the offset values for the pool which we will actually
4209 emit. */
4210
4211 static void
4212 recompute_pool_offsets (struct rtx_constant_pool *pool)
4213 {
4214 class constant_descriptor_rtx *desc;
4215 pool->offset = 0;
4216
4217 for (desc = pool->first; desc ; desc = desc->next)
4218 if (desc->mark)
4219 {
4220 /* Recalculate offset. */
4221 unsigned int align = desc->align;
4222 pool->offset += (align / BITS_PER_UNIT) - 1;
4223 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
4224 desc->offset = pool->offset;
4225 pool->offset += GET_MODE_SIZE (desc->mode);
4226 }
4227 }
4228
4229 /* Mark all constants that are referenced by SYMBOL_REFs in X.
4230 Emit referenced deferred strings. */
4231
4232 static void
4233 mark_constants_in_pattern (rtx insn)
4234 {
4235 subrtx_iterator::array_type array;
4236 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
4237 {
4238 const_rtx x = *iter;
4239 if (GET_CODE (x) == SYMBOL_REF)
4240 {
4241 if (CONSTANT_POOL_ADDRESS_P (x))
4242 {
4243 class constant_descriptor_rtx *desc = SYMBOL_REF_CONSTANT (x);
4244 if (desc->mark == 0)
4245 {
4246 desc->mark = 1;
4247 iter.substitute (desc->constant);
4248 }
4249 }
4250 else if (TREE_CONSTANT_POOL_ADDRESS_P (x))
4251 {
4252 tree decl = SYMBOL_REF_DECL (x);
4253 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
4254 {
4255 n_deferred_constants--;
4256 output_constant_def_contents (CONST_CAST_RTX (x));
4257 }
4258 }
4259 }
4260 }
4261 }
4262
4263 /* Look through appropriate parts of INSN, marking all entries in the
4264 constant pool which are actually being used. Entries that are only
4265 referenced by other constants are also marked as used. Emit
4266 deferred strings that are used. */
4267
4268 static void
4269 mark_constants (rtx_insn *insn)
4270 {
4271 if (!INSN_P (insn))
4272 return;
4273
4274 /* Insns may appear inside a SEQUENCE. Only check the patterns of
4275 insns, not any notes that may be attached. We don't want to mark
4276 a constant just because it happens to appear in a REG_EQUIV note. */
4277 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
4278 {
4279 int i, n = seq->len ();
4280 for (i = 0; i < n; ++i)
4281 {
4282 rtx subinsn = seq->element (i);
4283 if (INSN_P (subinsn))
4284 mark_constants_in_pattern (subinsn);
4285 }
4286 }
4287 else
4288 mark_constants_in_pattern (insn);
4289 }
4290
4291 /* Look through the instructions for this function, and mark all the
4292 entries in POOL which are actually being used. Emit deferred constants
4293 which have indeed been used. */
4294
4295 static void
4296 mark_constant_pool (void)
4297 {
4298 rtx_insn *insn;
4299
4300 if (!crtl->uses_const_pool && n_deferred_constants == 0)
4301 return;
4302
4303 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4304 mark_constants (insn);
4305 }
4306
4307 /* Write all the constants in POOL. */
4308
4309 static void
4310 output_constant_pool_contents (struct rtx_constant_pool *pool)
4311 {
4312 class constant_descriptor_rtx *desc;
4313
4314 for (desc = pool->first; desc ; desc = desc->next)
4315 if (desc->mark < 0)
4316 {
4317 #ifdef ASM_OUTPUT_DEF
4318 const char *name = XSTR (desc->sym, 0);
4319 char label[256];
4320 char buffer[256 + 32];
4321 const char *p;
4322
4323 ASM_GENERATE_INTERNAL_LABEL (label, "LC", ~desc->mark);
4324 p = label;
4325 if (desc->offset)
4326 {
4327 sprintf (buffer, "%s+%ld", p, (long) (desc->offset));
4328 p = buffer;
4329 }
4330 ASM_OUTPUT_DEF (asm_out_file, name, p);
4331 #else
4332 gcc_unreachable ();
4333 #endif
4334 }
4335 else if (desc->mark)
4336 {
4337 /* If the constant is part of an object_block, make sure that
4338 the constant has been positioned within its block, but do not
4339 write out its definition yet. output_object_blocks will do
4340 that later. */
4341 if (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
4342 && SYMBOL_REF_BLOCK (desc->sym))
4343 place_block_symbol (desc->sym);
4344 else
4345 {
4346 switch_to_section (targetm.asm_out.select_rtx_section
4347 (desc->mode, desc->constant, desc->align));
4348 output_constant_pool_1 (desc, desc->align);
4349 }
4350 }
4351 }
4352
4353 struct constant_descriptor_rtx_data {
4354 constant_descriptor_rtx *desc;
4355 target_unit *bytes;
4356 unsigned short size;
4357 unsigned short offset;
4358 unsigned int hash;
4359 };
4360
4361 /* qsort callback to sort constant_descriptor_rtx_data * vector by
4362 decreasing size. */
4363
4364 static int
4365 constant_descriptor_rtx_data_cmp (const void *p1, const void *p2)
4366 {
4367 constant_descriptor_rtx_data *const data1
4368 = *(constant_descriptor_rtx_data * const *) p1;
4369 constant_descriptor_rtx_data *const data2
4370 = *(constant_descriptor_rtx_data * const *) p2;
4371 if (data1->size > data2->size)
4372 return -1;
4373 if (data1->size < data2->size)
4374 return 1;
4375 if (data1->hash < data2->hash)
4376 return -1;
4377 gcc_assert (data1->hash > data2->hash);
4378 return 1;
4379 }
4380
4381 struct const_rtx_data_hasher : nofree_ptr_hash<constant_descriptor_rtx_data>
4382 {
4383 static hashval_t hash (constant_descriptor_rtx_data *);
4384 static bool equal (constant_descriptor_rtx_data *,
4385 constant_descriptor_rtx_data *);
4386 };
4387
4388 /* Hash and compare functions for const_rtx_data_htab. */
4389
4390 hashval_t
4391 const_rtx_data_hasher::hash (constant_descriptor_rtx_data *data)
4392 {
4393 return data->hash;
4394 }
4395
4396 bool
4397 const_rtx_data_hasher::equal (constant_descriptor_rtx_data *x,
4398 constant_descriptor_rtx_data *y)
4399 {
4400 if (x->hash != y->hash || x->size != y->size)
4401 return 0;
4402 unsigned int align1 = x->desc->align;
4403 unsigned int align2 = y->desc->align;
4404 unsigned int offset1 = (x->offset * BITS_PER_UNIT) & (align1 - 1);
4405 unsigned int offset2 = (y->offset * BITS_PER_UNIT) & (align2 - 1);
4406 if (offset1)
4407 align1 = least_bit_hwi (offset1);
4408 if (offset2)
4409 align2 = least_bit_hwi (offset2);
4410 if (align2 > align1)
4411 return 0;
4412 if (memcmp (x->bytes, y->bytes, x->size * sizeof (target_unit)) != 0)
4413 return 0;
4414 return 1;
4415 }
4416
4417 /* Attempt to optimize constant pool POOL. If it contains both CONST_VECTOR
4418 constants and scalar constants with the values of CONST_VECTOR elements,
4419 try to alias the scalar constants with the CONST_VECTOR elements. */
4420
4421 static void
4422 optimize_constant_pool (struct rtx_constant_pool *pool)
4423 {
4424 auto_vec<target_unit, 128> buffer;
4425 auto_vec<constant_descriptor_rtx_data *, 128> vec;
4426 object_allocator<constant_descriptor_rtx_data>
4427 data_pool ("constant_descriptor_rtx_data_pool");
4428 int idx = 0;
4429 size_t size = 0;
4430 for (constant_descriptor_rtx *desc = pool->first; desc; desc = desc->next)
4431 if (desc->mark > 0
4432 && ! (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
4433 && SYMBOL_REF_BLOCK (desc->sym)))
4434 {
4435 buffer.truncate (0);
4436 buffer.reserve (GET_MODE_SIZE (desc->mode));
4437 if (native_encode_rtx (desc->mode, desc->constant, buffer, 0,
4438 GET_MODE_SIZE (desc->mode)))
4439 {
4440 constant_descriptor_rtx_data *data = data_pool.allocate ();
4441 data->desc = desc;
4442 data->bytes = NULL;
4443 data->size = GET_MODE_SIZE (desc->mode);
4444 data->offset = 0;
4445 data->hash = idx++;
4446 size += data->size;
4447 vec.safe_push (data);
4448 }
4449 }
4450 if (idx)
4451 {
4452 vec.qsort (constant_descriptor_rtx_data_cmp);
4453 unsigned min_size = vec.last ()->size;
4454 target_unit *bytes = XNEWVEC (target_unit, size);
4455 unsigned int i;
4456 constant_descriptor_rtx_data *data;
4457 hash_table<const_rtx_data_hasher> * htab
4458 = new hash_table<const_rtx_data_hasher> (31);
4459 size = 0;
4460 FOR_EACH_VEC_ELT (vec, i, data)
4461 {
4462 buffer.truncate (0);
4463 native_encode_rtx (data->desc->mode, data->desc->constant,
4464 buffer, 0, data->size);
4465 memcpy (bytes + size, buffer.address (), data->size);
4466 data->bytes = bytes + size;
4467 data->hash = iterative_hash (data->bytes,
4468 data->size * sizeof (target_unit), 0);
4469 size += data->size;
4470 constant_descriptor_rtx_data **slot
4471 = htab->find_slot_with_hash (data, data->hash, INSERT);
4472 if (*slot)
4473 {
4474 data->desc->mark = ~(*slot)->desc->labelno;
4475 data->desc->offset = (*slot)->offset;
4476 }
4477 else
4478 {
4479 unsigned int sz = 1 << floor_log2 (data->size);
4480
4481 *slot = data;
4482 for (sz >>= 1; sz >= min_size; sz >>= 1)
4483 for (unsigned off = 0; off + sz <= data->size; off += sz)
4484 {
4485 constant_descriptor_rtx_data tmp;
4486 tmp.desc = data->desc;
4487 tmp.bytes = data->bytes + off;
4488 tmp.size = sz;
4489 tmp.offset = off;
4490 tmp.hash = iterative_hash (tmp.bytes,
4491 sz * sizeof (target_unit), 0);
4492 slot = htab->find_slot_with_hash (&tmp, tmp.hash, INSERT);
4493 if (*slot == NULL)
4494 {
4495 *slot = data_pool.allocate ();
4496 **slot = tmp;
4497 }
4498 }
4499 }
4500 }
4501 delete htab;
4502 XDELETE (bytes);
4503 }
4504 data_pool.release ();
4505 }
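
/* The optimization above encodes each emitted pool entry into its byte
   image, visits entries from largest to smallest, and records every
   power-of-two-sized, suitably placed sub-range of the larger entries in
   a hash table; a later, smaller entry whose bytes match such a
   sub-range is then emitted as label-plus-offset into the larger entry
   instead of as separate data.  A simplified standalone matcher
   (illustrative names; the real code also checks that the alignment
   implied by the offset is sufficient):  */
#if 0
#include <stddef.h>
#include <string.h>

/* Return the byte offset at which NEEDLE (NSIZE bytes, NSIZE a power of
   two) occurs inside HAY (HSIZE bytes) at an NSIZE-aligned position,
   or -1 if it does not occur there.  */
static long
find_aliased_offset (const unsigned char *hay, size_t hsize,
                     const unsigned char *needle, size_t nsize)
{
  for (size_t off = 0; off + nsize <= hsize; off += nsize)
    if (memcmp (hay + off, needle, nsize) == 0)
      return (long) off;
  return -1;
}
#endif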
4506
4507 /* Mark all constants that are used in the current function, then write
4508 out the function's private constant pool. */
4509
4510 static void
4511 output_constant_pool (const char *fnname ATTRIBUTE_UNUSED,
4512 tree fndecl ATTRIBUTE_UNUSED)
4513 {
4514 struct rtx_constant_pool *pool = crtl->varasm.pool;
4515
4516 /* It is possible for gcc to call force_const_mem and then to later
4517 discard the instructions which refer to the constant. In such a
4518 case we do not need to output the constant. */
4519 mark_constant_pool ();
4520
4521 /* Having marked the constant pool entries we'll actually emit, we
4522 now need to rebuild the offset information, which may have become
4523 stale. */
4524 recompute_pool_offsets (pool);
4525
4526 #ifdef ASM_OUTPUT_POOL_PROLOGUE
4527 ASM_OUTPUT_POOL_PROLOGUE (asm_out_file, fnname, fndecl, pool->offset);
4528 #endif
4529
4530 output_constant_pool_contents (pool);
4531
4532 #ifdef ASM_OUTPUT_POOL_EPILOGUE
4533 ASM_OUTPUT_POOL_EPILOGUE (asm_out_file, fnname, fndecl, pool->offset);
4534 #endif
4535 }
4536
4537 /* Write the contents of the shared constant pool. */
4538
4539 void
4540 output_shared_constant_pool (void)
4541 {
4542 if (optimize
4543 && TARGET_SUPPORTS_ALIASES)
4544 optimize_constant_pool (shared_constant_pool);
4545
4546 output_constant_pool_contents (shared_constant_pool);
4547 }
4548
4549 /* Determine what kind of relocations EXP may need. */
4550
4551 int
4552 compute_reloc_for_constant (tree exp)
4553 {
4554 int reloc = 0, reloc2;
4555 tree tem;
4556
4557 switch (TREE_CODE (exp))
4558 {
4559 case ADDR_EXPR:
4560 case FDESC_EXPR:
4561 /* Go inside any operations that get_inner_reference can handle and see
4562 if what's inside is a constant: no need to do anything here for
4563 addresses of variables or functions. */
4564 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
4565 tem = TREE_OPERAND (tem, 0))
4566 ;
4567
4568 if (TREE_CODE (tem) == MEM_REF
4569 && TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR)
4570 {
4571 reloc = compute_reloc_for_constant (TREE_OPERAND (tem, 0));
4572 break;
4573 }
4574
4575 if (!targetm.binds_local_p (tem))
4576 reloc |= 2;
4577 else
4578 reloc |= 1;
4579 break;
4580
4581 case PLUS_EXPR:
4582 case POINTER_PLUS_EXPR:
4583 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4584 reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1));
4585 break;
4586
4587 case MINUS_EXPR:
4588 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4589 reloc2 = compute_reloc_for_constant (TREE_OPERAND (exp, 1));
4590 /* The difference of two local labels is computable at link time. */
4591 if (reloc == 1 && reloc2 == 1)
4592 reloc = 0;
4593 else
4594 reloc |= reloc2;
4595 break;
4596
4597 CASE_CONVERT:
4598 case VIEW_CONVERT_EXPR:
4599 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4600 break;
4601
4602 case CONSTRUCTOR:
4603 {
4604 unsigned HOST_WIDE_INT idx;
4605 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
4606 if (tem != 0)
4607 reloc |= compute_reloc_for_constant (tem);
4608 }
4609 break;
4610
4611 default:
4612 break;
4613 }
4614 return reloc;
4615 }
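
/* Worked example (illustrative): the return value is a small bit mask
   in which 1 means a relocation against a symbol known to bind locally
   and 2 means a relocation against a symbol that may be preemptible.
   Assuming a file-local object 'a' and an extern, possibly preemptible
   object 'b':

     &a                                  -> 1
     &b                                  -> 2
     &a + &b                             -> 3  (both kinds appear)
     difference of two local references  -> 0  (computable at link time)

   The section chosen for the constant then depends on this value via
   targetm.asm_out.select_section.  */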
4616
4617 /* Find all the constants whose addresses are referenced inside of EXP,
4618 and make sure assembler code with a label has been output for each one. */
4620
4621 static void
4622 output_addressed_constants (tree exp, int defer)
4623 {
4624 tree tem;
4625
4626 switch (TREE_CODE (exp))
4627 {
4628 case ADDR_EXPR:
4629 case FDESC_EXPR:
4630 /* Go inside any operations that get_inner_reference can handle and see
4631 if what's inside is a constant: no need to do anything here for
4632 addresses of variables or functions. */
4633 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
4634 tem = TREE_OPERAND (tem, 0))
4635 ;
4636
4637 /* If we have an initialized CONST_DECL, retrieve the initializer. */
4638 if (TREE_CODE (tem) == CONST_DECL && DECL_INITIAL (tem))
4639 tem = DECL_INITIAL (tem);
4640
4641 if (CONSTANT_CLASS_P (tem) || TREE_CODE (tem) == CONSTRUCTOR)
4642 output_constant_def (tem, defer);
4643
4644 if (TREE_CODE (tem) == MEM_REF)
4645 output_addressed_constants (TREE_OPERAND (tem, 0), defer);
4646 break;
4647
4648 case PLUS_EXPR:
4649 case POINTER_PLUS_EXPR:
4650 case MINUS_EXPR:
4651 output_addressed_constants (TREE_OPERAND (exp, 1), defer);
4652 gcc_fallthrough ();
4653
4654 CASE_CONVERT:
4655 case VIEW_CONVERT_EXPR:
4656 output_addressed_constants (TREE_OPERAND (exp, 0), defer);
4657 break;
4658
4659 case CONSTRUCTOR:
4660 {
4661 unsigned HOST_WIDE_INT idx;
4662 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
4663 if (tem != 0)
4664 output_addressed_constants (tem, defer);
4665 }
4666 break;
4667
4668 default:
4669 break;
4670 }
4671 }
4672
4673 /* Whether a constructor CTOR is a valid static constant initializer if all
4674 its elements are. This used to be internal to initializer_constant_valid_p
4675 and has been exposed to let other functions like categorize_ctor_elements
4676 evaluate the property while walking a constructor for other purposes. */
4677
4678 bool
4679 constructor_static_from_elts_p (const_tree ctor)
4680 {
4681 return (TREE_CONSTANT (ctor)
4682 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4683 || TREE_CODE (TREE_TYPE (ctor)) == RECORD_TYPE
4684 || TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE));
4685 }
4686
4687 static tree initializer_constant_valid_p_1 (tree value, tree endtype,
4688 tree *cache);
4689
4690 /* A subroutine of initializer_constant_valid_p. VALUE is a MINUS_EXPR,
4691 PLUS_EXPR or POINTER_PLUS_EXPR. This looks for cases of VALUE
4692 which are valid when ENDTYPE is an integer of any size; in
4693 particular, this does not accept a pointer minus a constant. This
4694 returns null_pointer_node if the VALUE is an absolute constant
4695 which can be used to initialize a static variable. Otherwise it
4696 returns NULL. */
4697
4698 static tree
4699 narrowing_initializer_constant_valid_p (tree value, tree endtype, tree *cache)
4700 {
4701 tree op0, op1;
4702
4703 if (!INTEGRAL_TYPE_P (endtype))
4704 return NULL_TREE;
4705
4706 op0 = TREE_OPERAND (value, 0);
4707 op1 = TREE_OPERAND (value, 1);
4708
4709 /* Like STRIP_NOPS except allow the operand mode to widen. This
4710 works around a feature of fold that simplifies (int)(p1 - p2) to
4711 ((int)p1 - (int)p2) under the theory that the narrower operation
4712 is cheaper. */
4713
4714 while (CONVERT_EXPR_P (op0)
4715 || TREE_CODE (op0) == NON_LVALUE_EXPR)
4716 {
4717 tree inner = TREE_OPERAND (op0, 0);
4718 if (inner == error_mark_node
4719 || ! INTEGRAL_TYPE_P (TREE_TYPE (op0))
4720 || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (op0)))
4721 || ! INTEGRAL_TYPE_P (TREE_TYPE (inner))
4722 || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
4723 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op0)))
4724 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
4725 break;
4726 op0 = inner;
4727 }
4728
4729 while (CONVERT_EXPR_P (op1)
4730 || TREE_CODE (op1) == NON_LVALUE_EXPR)
4731 {
4732 tree inner = TREE_OPERAND (op1, 0);
4733 if (inner == error_mark_node
4734 || ! INTEGRAL_TYPE_P (TREE_TYPE (op1))
4735 || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (op1)))
4736 || ! INTEGRAL_TYPE_P (TREE_TYPE (inner))
4737 || ! SCALAR_INT_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
4738 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op1)))
4739 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
4740 break;
4741 op1 = inner;
4742 }
4743
4744 op0 = initializer_constant_valid_p_1 (op0, endtype, cache);
4745 if (!op0)
4746 return NULL_TREE;
4747
4748 op1 = initializer_constant_valid_p_1 (op1, endtype,
4749 cache ? cache + 2 : NULL);
4750 /* Both initializers must be known. */
4751 if (op1)
4752 {
4753 if (op0 == op1
4754 && (op0 == null_pointer_node
4755 || TREE_CODE (value) == MINUS_EXPR))
4756 return null_pointer_node;
4757
4758 /* Support differences between labels. */
4759 if (TREE_CODE (op0) == LABEL_DECL
4760 && TREE_CODE (op1) == LABEL_DECL)
4761 return null_pointer_node;
4762
4763 if (TREE_CODE (op0) == STRING_CST
4764 && TREE_CODE (op1) == STRING_CST
4765 && operand_equal_p (op0, op1, 1))
4766 return null_pointer_node;
4767 }
4768
4769 return NULL_TREE;
4770 }
4771
4772 /* Helper function of initializer_constant_valid_p.
4773 Return nonzero if VALUE is a valid constant-valued expression
4774 for use in initializing a static variable; one that can be an
4775 element of a "constant" initializer.
4776
4777 Return null_pointer_node if the value is absolute;
4778 if it is relocatable, return the variable that determines the relocation.
4779 We assume that VALUE has been folded as much as possible;
4780 therefore, we do not need to check for such things as
4781 arithmetic-combinations of integers.
4782
4783 Use CACHE (pointer to 2 tree values) for caching if non-NULL. */
4784
4785 static tree
4786 initializer_constant_valid_p_1 (tree value, tree endtype, tree *cache)
4787 {
4788 tree ret;
4789
4790 switch (TREE_CODE (value))
4791 {
4792 case CONSTRUCTOR:
4793 if (constructor_static_from_elts_p (value))
4794 {
4795 unsigned HOST_WIDE_INT idx;
4796 tree elt;
4797 bool absolute = true;
4798
4799 if (cache && cache[0] == value)
4800 return cache[1];
4801 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
4802 {
4803 tree reloc;
4804 reloc = initializer_constant_valid_p_1 (elt, TREE_TYPE (elt),
4805 NULL);
4806 if (!reloc
4807 /* An absolute value is required with reverse SSO. */
4808 || (reloc != null_pointer_node
4809 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (value))
4810 && !AGGREGATE_TYPE_P (TREE_TYPE (elt))))
4811 {
4812 if (cache)
4813 {
4814 cache[0] = value;
4815 cache[1] = NULL_TREE;
4816 }
4817 return NULL_TREE;
4818 }
4819 if (reloc != null_pointer_node)
4820 absolute = false;
4821 }
4822 /* For a non-absolute relocation, there is no single
4823 variable that can be "the variable that determines the
4824 relocation." */
4825 if (cache)
4826 {
4827 cache[0] = value;
4828 cache[1] = absolute ? null_pointer_node : error_mark_node;
4829 }
4830 return absolute ? null_pointer_node : error_mark_node;
4831 }
4832
4833 return TREE_STATIC (value) ? null_pointer_node : NULL_TREE;
4834
4835 case INTEGER_CST:
4836 case VECTOR_CST:
4837 case REAL_CST:
4838 case FIXED_CST:
4839 case STRING_CST:
4840 case COMPLEX_CST:
4841 return null_pointer_node;
4842
4843 case ADDR_EXPR:
4844 case FDESC_EXPR:
4845 {
4846 tree op0 = staticp (TREE_OPERAND (value, 0));
4847 if (op0)
4848 {
4849 /* "&(*a).f" is like unto pointer arithmetic. If "a" turns out
4850 to be a constant, this is old-skool offsetof-like nonsense. */
4851 if (TREE_CODE (op0) == INDIRECT_REF
4852 && TREE_CONSTANT (TREE_OPERAND (op0, 0)))
4853 return null_pointer_node;
4854 /* Taking the address of a nested function involves a trampoline,
4855 unless we don't need or want one. */
4856 if (TREE_CODE (op0) == FUNCTION_DECL
4857 && DECL_STATIC_CHAIN (op0)
4858 && !TREE_NO_TRAMPOLINE (value))
4859 return NULL_TREE;
4860 /* "&{...}" requires a temporary to hold the constructed
4861 object. */
4862 if (TREE_CODE (op0) == CONSTRUCTOR)
4863 return NULL_TREE;
4864 }
4865 return op0;
4866 }
4867
4868 case NON_LVALUE_EXPR:
4869 return initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4870 endtype, cache);
4871
4872 case VIEW_CONVERT_EXPR:
4873 {
4874 tree src = TREE_OPERAND (value, 0);
4875 tree src_type = TREE_TYPE (src);
4876 tree dest_type = TREE_TYPE (value);
4877
4878 /* Allow view-conversions from aggregate to non-aggregate type only
4879 if the bit pattern is fully preserved afterwards; otherwise, the
4880 RTL expander won't be able to apply a subsequent transformation
4881 to the underlying constructor. */
4882 if (AGGREGATE_TYPE_P (src_type) && !AGGREGATE_TYPE_P (dest_type))
4883 {
4884 if (TYPE_MODE (endtype) == TYPE_MODE (dest_type))
4885 return initializer_constant_valid_p_1 (src, endtype, cache);
4886 else
4887 return NULL_TREE;
4888 }
4889
4890 /* Allow all other kinds of view-conversion. */
4891 return initializer_constant_valid_p_1 (src, endtype, cache);
4892 }
4893
4894 CASE_CONVERT:
4895 {
4896 tree src = TREE_OPERAND (value, 0);
4897 tree src_type = TREE_TYPE (src);
4898 tree dest_type = TREE_TYPE (value);
4899
4900 /* Allow conversions between pointer types and offset types. */
4901 if ((POINTER_TYPE_P (dest_type) && POINTER_TYPE_P (src_type))
4902 || (TREE_CODE (dest_type) == OFFSET_TYPE
4903 && TREE_CODE (src_type) == OFFSET_TYPE))
4904 return initializer_constant_valid_p_1 (src, endtype, cache);
4905
4906 /* Allow length-preserving conversions between integer types and
4907 floating-point types. */
4908 if (((INTEGRAL_TYPE_P (dest_type) && INTEGRAL_TYPE_P (src_type))
4909 || (FLOAT_TYPE_P (dest_type) && FLOAT_TYPE_P (src_type)))
4910 && (TYPE_PRECISION (dest_type) == TYPE_PRECISION (src_type)))
4911 return initializer_constant_valid_p_1 (src, endtype, cache);
4912
4913 /* Allow conversions between other integer types only if the
4914 value is explicit. Don't allow sign-extension to a type larger
4915 than both word and pointer size; there are no relocations that
4916 would sign-extend a value to a wider type. */
4917 if (INTEGRAL_TYPE_P (dest_type)
4918 && INTEGRAL_TYPE_P (src_type)
4919 && (TYPE_UNSIGNED (src_type)
4920 || TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type)
4921 || TYPE_PRECISION (dest_type) <= BITS_PER_WORD
4922 || TYPE_PRECISION (dest_type) <= POINTER_SIZE))
4923 {
4924 tree inner = initializer_constant_valid_p_1 (src, endtype, cache);
4925 if (inner == null_pointer_node)
4926 return null_pointer_node;
4927 break;
4928 }
4929
4930 /* Allow (int) &foo provided int is as wide as a pointer. */
4931 if (INTEGRAL_TYPE_P (dest_type) && POINTER_TYPE_P (src_type)
4932 && (TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type)))
4933 return initializer_constant_valid_p_1 (src, endtype, cache);
4934
4935 /* Likewise conversions from int to pointers, but also allow
4936 conversions from 0. */
4937 if ((POINTER_TYPE_P (dest_type)
4938 || TREE_CODE (dest_type) == OFFSET_TYPE)
4939 && INTEGRAL_TYPE_P (src_type))
4940 {
4941 if (TREE_CODE (src) == INTEGER_CST
4942 && TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type))
4943 return null_pointer_node;
4944 if (integer_zerop (src))
4945 return null_pointer_node;
4946 else if (TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type))
4947 return initializer_constant_valid_p_1 (src, endtype, cache);
4948 }
4949
4950 /* Allow conversions to struct or union types if the value
4951 inside is okay. */
4952 if (TREE_CODE (dest_type) == RECORD_TYPE
4953 || TREE_CODE (dest_type) == UNION_TYPE)
4954 return initializer_constant_valid_p_1 (src, endtype, cache);
4955 }
4956 break;
4957
4958 case POINTER_PLUS_EXPR:
4959 case PLUS_EXPR:
4960 /* Any valid floating-point constants will have been folded by now;
4961 with -frounding-math we hit this with addition of two constants. */
4962 if (TREE_CODE (endtype) == REAL_TYPE)
4963 return NULL_TREE;
4964 if (cache && cache[0] == value)
4965 return cache[1];
4966 if (! INTEGRAL_TYPE_P (endtype)
4967 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
4968 {
4969 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
4970 tree valid0
4971 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4972 endtype, ncache);
4973 tree valid1
4974 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
4975 endtype, ncache + 2);
4976 /* If either term is absolute, use the other term's relocation. */
4977 if (valid0 == null_pointer_node)
4978 ret = valid1;
4979 else if (valid1 == null_pointer_node)
4980 ret = valid0;
4981 /* Support narrowing pointer differences. */
4982 else
4983 ret = narrowing_initializer_constant_valid_p (value, endtype,
4984 ncache);
4985 }
4986 else
4987 /* Support narrowing pointer differences. */
4988 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
4989 if (cache)
4990 {
4991 cache[0] = value;
4992 cache[1] = ret;
4993 }
4994 return ret;
4995
4996 case POINTER_DIFF_EXPR:
4997 case MINUS_EXPR:
4998 if (TREE_CODE (endtype) == REAL_TYPE)
4999 return NULL_TREE;
5000 if (cache && cache[0] == value)
5001 return cache[1];
5002 if (! INTEGRAL_TYPE_P (endtype)
5003 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
5004 {
5005 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
5006 tree valid0
5007 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
5008 endtype, ncache);
5009 tree valid1
5010 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
5011 endtype, ncache + 2);
5012 /* Win if second argument is absolute. */
5013 if (valid1 == null_pointer_node)
5014 ret = valid0;
5015 /* Win if both arguments have the same relocation.
5016 Then the value is absolute. */
5017 else if (valid0 == valid1 && valid0 != 0)
5018 ret = null_pointer_node;
5019 /* Since GCC guarantees that string constants are unique in the
5020 generated code, a subtraction between two copies of the same
5021 constant string is absolute. */
5022 else if (valid0 && TREE_CODE (valid0) == STRING_CST
5023 && valid1 && TREE_CODE (valid1) == STRING_CST
5024 && operand_equal_p (valid0, valid1, 1))
5025 ret = null_pointer_node;
5026 /* Support narrowing differences. */
5027 else
5028 ret = narrowing_initializer_constant_valid_p (value, endtype,
5029 ncache);
5030 }
5031 else
5032 /* Support narrowing differences. */
5033 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
5034 if (cache)
5035 {
5036 cache[0] = value;
5037 cache[1] = ret;
5038 }
5039 return ret;
5040
5041 default:
5042 break;
5043 }
5044
5045 return NULL_TREE;
5046 }
5047
5048 /* Return nonzero if VALUE is a valid constant-valued expression
5049 for use in initializing a static variable; one that can be an
5050 element of a "constant" initializer.
5051
5052 Return null_pointer_node if the value is absolute;
5053 if it is relocatable, return the variable that determines the relocation.
5054 We assume that VALUE has been folded as much as possible;
5055 therefore, we do not need to check for such things as
5056 arithmetic-combinations of integers. */
5057 tree
5058 initializer_constant_valid_p (tree value, tree endtype, bool reverse)
5059 {
5060 tree reloc = initializer_constant_valid_p_1 (value, endtype, NULL);
5061
5062 /* An absolute value is required with reverse storage order. */
5063 if (reloc
5064 && reloc != null_pointer_node
5065 && reverse
5066 && !AGGREGATE_TYPE_P (endtype)
5067 && !VECTOR_TYPE_P (endtype))
5068 reloc = NULL_TREE;
5069
5070 return reloc;
5071 }
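
/* What the predicate accepts, sketched at the C level (illustrative;
   the front end may reject some of these before this predicate is ever
   consulted):  */
#if 0
static int a, b;

static int *p1 = &a;        /* Valid: relocatable against 'a'.  */
static int *p2 = &a + 1;    /* Valid: constant offset from 'a'.  */

/* Valid and absolute: both operands carry the same relocation, so the
   difference is a link-time constant (here simply 0).  */
static long d1 = (char *) &a - (char *) &a;

/* Not a valid static initializer: the difference of the addresses of
   two distinct objects has no link-time value.  */
static long d2 = (char *) &a - (char *) &b;
#endif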
5072
5073 /* Return true if VALUE is a valid constant-valued expression
5074 for use in initializing a static bit-field; one that can be
5075 an element of a "constant" initializer. */
5076
5077 bool
5078 initializer_constant_valid_for_bitfield_p (tree value)
5079 {
5080 /* For bitfields we support integer constants or possibly nested aggregates
5081 of such. */
5082 switch (TREE_CODE (value))
5083 {
5084 case CONSTRUCTOR:
5085 {
5086 unsigned HOST_WIDE_INT idx;
5087 tree elt;
5088
5089 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
5090 if (!initializer_constant_valid_for_bitfield_p (elt))
5091 return false;
5092 return true;
5093 }
5094
5095 case INTEGER_CST:
5096 case REAL_CST:
5097 return true;
5098
5099 case VIEW_CONVERT_EXPR:
5100 case NON_LVALUE_EXPR:
5101 return
5102 initializer_constant_valid_for_bitfield_p (TREE_OPERAND (value, 0));
5103
5104 default:
5105 break;
5106 }
5107
5108 return false;
5109 }
5110
5111 /* Check if a STRING_CST fits into the field.
5112 Tolerate only the case when the NUL termination
5113 does not fit into the field. */
5114
5115 static bool
5116 check_string_literal (tree string, unsigned HOST_WIDE_INT size)
5117 {
5118 tree type = TREE_TYPE (string);
5119 tree eltype = TREE_TYPE (type);
5120 unsigned HOST_WIDE_INT elts = tree_to_uhwi (TYPE_SIZE_UNIT (eltype));
5121 unsigned HOST_WIDE_INT mem_size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
5122 int len = TREE_STRING_LENGTH (string);
5123
5124 if (elts != 1 && elts != 2 && elts != 4)
5125 return false;
5126 if (len < 0 || len % elts != 0)
5127 return false;
5128 if (size < (unsigned)len)
5129 return false;
5130 if (mem_size != size)
5131 return false;
5132 return true;
5133 }
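
/* The tolerated case referred to above arises from initializers where
   the object leaves no room for the string's terminating NUL, e.g.
   (a sketch, not GCC code):  */
#if 0
static const char tag[3] = "abc";   /* 3 bytes of payload, no room for the NUL.  */
#endif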
5134
5135 /* output_constructor outer state of relevance in recursive calls, typically
5136 for nested aggregate bitfields. */
5137
5138 struct oc_outer_state {
5139 unsigned int bit_offset; /* current position in ... */
5140 int byte; /* ... the outer byte buffer. */
5141 };
5142
5143 static unsigned HOST_WIDE_INT
5144 output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int, bool,
5145 oc_outer_state *);
5146
5147 /* Output assembler code for constant EXP, with no label.
5148 This includes the pseudo-op such as ".int" or ".byte", and a newline.
5149 Assumes output_addressed_constants has been done on EXP already.
5150
5151 Generate at least SIZE bytes of assembler data, padding at the end
5152 with zeros if necessary. SIZE must always be specified. The returned
5153 value is the actual number of bytes of assembler data generated, which
5154 may be bigger than SIZE if the object contains a variable length field.
5155
5156 SIZE is important for structure constructors,
5157 since trailing members may have been omitted from the constructor.
5158 It is also important for initialization of arrays from string constants
5159 since the full length of the string constant might not be wanted.
5160 It is also needed for initialization of unions, where the initializer's
5161 type is just one member, and that may not be as long as the union.
5162
5163 There is a case in which we would fail to output exactly SIZE bytes:
5164 for a structure constructor that wants to produce more than SIZE bytes.
5165 But such constructors will never be generated for any possible input.
5166
5167 ALIGN is the alignment of the data in bits.
5168
5169 If REVERSE is true, EXP is output in reverse storage order. */
5170
5171 static unsigned HOST_WIDE_INT
5172 output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
5173 bool reverse, bool merge_strings)
5174 {
5175 enum tree_code code;
5176 unsigned HOST_WIDE_INT thissize;
5177 rtx cst;
5178
5179 if (size == 0 || flag_syntax_only)
5180 return size;
5181
5182 /* See if we're trying to initialize a pointer in a non-default mode
5183 to the address of some declaration somewhere. If the target says
5184 the mode is valid for pointers, assume the target has a way of
5185 resolving it. */
5186 if (TREE_CODE (exp) == NOP_EXPR
5187 && POINTER_TYPE_P (TREE_TYPE (exp))
5188 && targetm.addr_space.valid_pointer_mode
5189 (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
5190 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
5191 {
5192 tree saved_type = TREE_TYPE (exp);
5193
5194 /* Peel off any intermediate conversions-to-pointer for valid
5195 pointer modes. */
5196 while (TREE_CODE (exp) == NOP_EXPR
5197 && POINTER_TYPE_P (TREE_TYPE (exp))
5198 && targetm.addr_space.valid_pointer_mode
5199 (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
5200 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
5201 exp = TREE_OPERAND (exp, 0);
5202
5203 /* If what we're left with is the address of something, we can
5204 convert the address to the final type and output it that
5205 way. */
5206 if (TREE_CODE (exp) == ADDR_EXPR)
5207 exp = build1 (ADDR_EXPR, saved_type, TREE_OPERAND (exp, 0));
5208 /* Likewise for constant ints. */
5209 else if (TREE_CODE (exp) == INTEGER_CST)
5210 exp = fold_convert (saved_type, exp);
5211
5212 }
5213
5214 /* Eliminate any conversions since we'll be outputting the underlying
5215 constant. */
5216 while (CONVERT_EXPR_P (exp)
5217 || TREE_CODE (exp) == NON_LVALUE_EXPR
5218 || TREE_CODE (exp) == VIEW_CONVERT_EXPR)
5219 {
5220 HOST_WIDE_INT type_size = int_size_in_bytes (TREE_TYPE (exp));
5221 HOST_WIDE_INT op_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0)));
5222
5223 /* Make sure eliminating the conversion is really a no-op, except with
5224 VIEW_CONVERT_EXPRs to allow for wild Ada unchecked conversions and
5225 union types to allow for Ada unchecked unions. */
5226 if (type_size > op_size
5227 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5228 && TREE_CODE (TREE_TYPE (exp)) != UNION_TYPE)
5229 /* Keep the conversion. */
5230 break;
5231 else
5232 exp = TREE_OPERAND (exp, 0);
5233 }
5234
5235 code = TREE_CODE (TREE_TYPE (exp));
5236 thissize = int_size_in_bytes (TREE_TYPE (exp));
5237
5238 /* Allow a constructor with no elements for any data type.
5239 This means to fill the space with zeros. */
5240 if (TREE_CODE (exp) == CONSTRUCTOR
5241 && vec_safe_is_empty (CONSTRUCTOR_ELTS (exp)))
5242 {
5243 assemble_zeros (size);
5244 return size;
5245 }
5246
5247 if (TREE_CODE (exp) == FDESC_EXPR)
5248 {
5249 #ifdef ASM_OUTPUT_FDESC
5250 HOST_WIDE_INT part = tree_to_shwi (TREE_OPERAND (exp, 1));
5251 tree decl = TREE_OPERAND (exp, 0);
5252 ASM_OUTPUT_FDESC (asm_out_file, decl, part);
5253 #else
5254 gcc_unreachable ();
5255 #endif
5256 return size;
5257 }
5258
5259 /* Now output the underlying data. If we've handled the padding, return.
5260 Otherwise, break and ensure SIZE is the size written. */
5261 switch (code)
5262 {
5263 case BOOLEAN_TYPE:
5264 case INTEGER_TYPE:
5265 case ENUMERAL_TYPE:
5266 case POINTER_TYPE:
5267 case REFERENCE_TYPE:
5268 case OFFSET_TYPE:
5269 case FIXED_POINT_TYPE:
5270 case NULLPTR_TYPE:
5271 cst = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
5272 if (reverse)
5273 cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst);
5274 if (!assemble_integer (cst, MIN (size, thissize), align, 0))
5275 error ("initializer for integer/fixed-point value is too complicated");
5276 break;
5277
5278 case REAL_TYPE:
5279 gcc_assert (size == thissize);
5280 if (TREE_CODE (exp) != REAL_CST)
5281 error ("initializer for floating value is not a floating constant");
5282 else
5283 assemble_real (TREE_REAL_CST (exp),
5284 SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (exp)),
5285 align, reverse);
5286 break;
5287
5288 case COMPLEX_TYPE:
5289 output_constant (TREE_REALPART (exp), thissize / 2, align,
5290 reverse, false);
5291 output_constant (TREE_IMAGPART (exp), thissize / 2,
5292 min_align (align, BITS_PER_UNIT * (thissize / 2)),
5293 reverse, false);
5294 break;
5295
5296 case ARRAY_TYPE:
5297 case VECTOR_TYPE:
5298 switch (TREE_CODE (exp))
5299 {
5300 case CONSTRUCTOR:
5301 return output_constructor (exp, size, align, reverse, NULL);
5302 case STRING_CST:
5303 thissize = (unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp);
5304 if (merge_strings
5305 && (thissize == 0
5306 || TREE_STRING_POINTER (exp) [thissize - 1] != '\0'))
5307 thissize++;
5308 gcc_checking_assert (check_string_literal (exp, size));
5309 assemble_string (TREE_STRING_POINTER (exp), thissize);
5310 break;
5311 case VECTOR_CST:
5312 {
5313 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5314 unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner));
5315 int elt_size = GET_MODE_SIZE (inner);
5316 output_constant (VECTOR_CST_ELT (exp, 0), elt_size, align,
5317 reverse, false);
5318 thissize = elt_size;
5319 /* Static constants must have a fixed size. */
5320 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
5321 for (unsigned int i = 1; i < nunits; i++)
5322 {
5323 output_constant (VECTOR_CST_ELT (exp, i), elt_size, nalign,
5324 reverse, false);
5325 thissize += elt_size;
5326 }
5327 break;
5328 }
5329 default:
5330 gcc_unreachable ();
5331 }
5332 break;
5333
5334 case RECORD_TYPE:
5335 case UNION_TYPE:
5336 gcc_assert (TREE_CODE (exp) == CONSTRUCTOR);
5337 return output_constructor (exp, size, align, reverse, NULL);
5338
5339 case ERROR_MARK:
5340 return 0;
5341
5342 default:
5343 gcc_unreachable ();
5344 }
5345
5346 if (size > thissize)
5347 assemble_zeros (size - thissize);
5348
5349 return size;
5350 }
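
/* A hedged sketch of the SIZE contract described above, using generic ELF
   directives (the exact pseudo-ops are target dependent):

       struct s { int i; char c; } v = { 1 };

   with a 4-byte int and natural alignment, the caller asks for
   sizeof (struct s) == 8 bytes; the constructor only supplies I, so the
   output amounts to

       .long   1
       .zero   4

   the trailing zeros coming from the padding logic here or, for the
   aggregate cases, from output_constructor.  */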
5351
5352 /* Subroutine of output_constructor, used for computing the size of
5353 arrays of unspecified length. VAL must be a CONSTRUCTOR of an array
5354 type with an unspecified upper bound. */
5355
5356 static unsigned HOST_WIDE_INT
5357 array_size_for_constructor (tree val)
5358 {
5359 tree max_index;
5360 unsigned HOST_WIDE_INT cnt;
5361 tree index, value, tmp;
5362 offset_int i;
5363
5364 /* This code used to attempt to handle string constants that are not
5365 arrays of single bytes, but nothing else does, so there's no point in
5366 doing it here. */
5367 if (TREE_CODE (val) == STRING_CST)
5368 return TREE_STRING_LENGTH (val);
5369
5370 max_index = NULL_TREE;
5371 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (val), cnt, index, value)
5372 {
5373 if (TREE_CODE (index) == RANGE_EXPR)
5374 index = TREE_OPERAND (index, 1);
5375 if (max_index == NULL_TREE || tree_int_cst_lt (max_index, index))
5376 max_index = index;
5377 }
5378
5379 if (max_index == NULL_TREE)
5380 return 0;
5381
5382 /* Compute the total number of array elements. */
5383 tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val)));
5384 i = wi::to_offset (max_index) - wi::to_offset (tmp) + 1;
5385
5386 /* Multiply by the array element unit size to find number of bytes. */
5387 i *= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val))));
5388
5389 gcc_assert (wi::fits_uhwi_p (i));
5390 return i.to_uhwi ();
5391 }
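
/* For instance, with a flexible array member initializer such as

       struct { int n; int a[]; } v = { 3, { 7, 8, 9 } };

   the constructor for A has maximum index 2 and minimum index 0, so the
   size computed here is (2 - 0 + 1) * sizeof (int), i.e. 12 bytes on a
   target with 4-byte int.  */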
5392
5393 /* Other datastructures + helpers for output_constructor. */
5394
5395 /* output_constructor local state to support interaction with helpers. */
5396
5397 struct oc_local_state {
5398
5399 /* Received arguments. */
5400 tree exp; /* Constructor expression. */
5401 tree type; /* Type of constructor expression. */
5402 unsigned HOST_WIDE_INT size; /* # bytes to output - pad if necessary. */
5403 unsigned int align; /* Known initial alignment. */
5404 tree min_index; /* Lower bound if specified for an array. */
5405
5406 /* Output processing state. */
5407 HOST_WIDE_INT total_bytes; /* # bytes output so far / current position. */
5408 int byte; /* Part of a bitfield byte yet to be output. */
5409 int last_relative_index; /* Implicit or explicit index of the last
5410 array element output within a bitfield. */
5411 bool byte_buffer_in_use; /* Whether BYTE is in use. */
5412 bool reverse; /* Whether reverse storage order is in use. */
5413
5414 /* Current element. */
5415 tree field; /* Current field decl in a record. */
5416 tree val; /* Current element value. */
5417 tree index; /* Current element index. */
5418
5419 };
5420
5421 /* Helper for output_constructor. From the current LOCAL state, output a
5422 RANGE_EXPR element. */
5423
5424 static void
5425 output_constructor_array_range (oc_local_state *local)
5426 {
5427 /* Perform the index calculation in modulo arithmetic but
5428 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
5429 but we are using an unsigned sizetype. */
5430 unsigned prec = TYPE_PRECISION (sizetype);
5431 offset_int idx = wi::sext (wi::to_offset (TREE_OPERAND (local->index, 0))
5432 - wi::to_offset (local->min_index), prec);
5433 tree valtype = TREE_TYPE (local->val);
5434 HOST_WIDE_INT fieldpos
5435 = (idx * wi::to_offset (TYPE_SIZE_UNIT (valtype))).to_short_addr ();
5436
5437 /* Advance to offset of this element. */
5438 if (fieldpos > local->total_bytes)
5439 {
5440 assemble_zeros (fieldpos - local->total_bytes);
5441 local->total_bytes = fieldpos;
5442 }
5443 else
5444 /* Must not go backwards. */
5445 gcc_assert (fieldpos == local->total_bytes);
5446
5447 unsigned HOST_WIDE_INT fieldsize
5448 = int_size_in_bytes (TREE_TYPE (local->type));
5449
5450 HOST_WIDE_INT lo_index
5451 = tree_to_shwi (TREE_OPERAND (local->index, 0));
5452 HOST_WIDE_INT hi_index
5453 = tree_to_shwi (TREE_OPERAND (local->index, 1));
5454 HOST_WIDE_INT index;
5455
5456 unsigned int align2
5457 = min_align (local->align, fieldsize * BITS_PER_UNIT);
5458
5459 for (index = lo_index; index <= hi_index; index++)
5460 {
5461 /* Output the element's initial value. */
5462 if (local->val == NULL_TREE)
5463 assemble_zeros (fieldsize);
5464 else
5465 fieldsize = output_constant (local->val, fieldsize, align2,
5466 local->reverse, false);
5467
5468 /* Count its size. */
5469 local->total_bytes += fieldsize;
5470 }
5471 }
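
/* Informal example of the RANGE_EXPR case (GNU designated range
   initializers):

       int a[8] = { [2 ... 4] = 5 };

   reaches this helper once for the whole range, with lo_index 2 and
   hi_index 4: zeros are emitted up to byte offset 2 * sizeof (int) and
   then the value 5 is output three times.  */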
5472
5473 /* Helper for output_constructor. From the current LOCAL state, output a
5474 field element that is neither a true bitfield nor part of an outer one. */
5475
5476 static void
5477 output_constructor_regular_field (oc_local_state *local)
5478 {
5479 /* Field size and position. Since this structure is static, we know the
5480 positions are constant. */
5481 unsigned HOST_WIDE_INT fieldsize;
5482 HOST_WIDE_INT fieldpos;
5483
5484 unsigned int align2;
5485
5486 /* Output any buffered-up bit-fields preceding this element. */
5487 if (local->byte_buffer_in_use)
5488 {
5489 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5490 local->total_bytes++;
5491 local->byte_buffer_in_use = false;
5492 }
5493
5494 if (local->index != NULL_TREE)
5495 {
5496 /* Perform the index calculation in modulo arithmetic but
5497 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
5498 but we are using an unsigned sizetype. */
5499 unsigned prec = TYPE_PRECISION (sizetype);
5500 offset_int idx = wi::sext (wi::to_offset (local->index)
5501 - wi::to_offset (local->min_index), prec);
5502 fieldpos = (idx * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (local->val))))
5503 .to_short_addr ();
5504 }
5505 else if (local->field != NULL_TREE)
5506 fieldpos = int_byte_position (local->field);
5507 else
5508 fieldpos = 0;
5509
5510 /* Advance to offset of this element.
5511 Note no alignment needed in an array, since that is guaranteed
5512 if each element has the proper size. */
5513 if (local->field != NULL_TREE || local->index != NULL_TREE)
5514 {
5515 if (fieldpos > local->total_bytes)
5516 {
5517 assemble_zeros (fieldpos - local->total_bytes);
5518 local->total_bytes = fieldpos;
5519 }
5520 else
5521 /* Must not go backwards. */
5522 gcc_assert (fieldpos == local->total_bytes);
5523 }
5524
5525 /* Find the alignment of this element. */
5526 align2 = min_align (local->align, BITS_PER_UNIT * fieldpos);
5527
5528 /* Determine size this element should occupy. */
5529 if (local->field)
5530 {
5531 fieldsize = 0;
5532
5533 /* If this is an array with an unspecified upper bound,
5534 the initializer determines the size. */
5535 /* ??? This ought to be checked only if DECL_SIZE_UNIT is NULL,
5536 but we cannot do this until the deprecated support for
5537 initializing zero-length array members is removed. */
5538 if (TREE_CODE (TREE_TYPE (local->field)) == ARRAY_TYPE
5539 && (!TYPE_DOMAIN (TREE_TYPE (local->field))
5540 || !TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (local->field)))))
5541 {
5542 unsigned HOST_WIDE_INT fldsize
5543 = array_size_for_constructor (local->val);
5544 fieldsize = int_size_in_bytes (TREE_TYPE (local->val));
5545 /* In most cases fieldsize == fldsize, as the size of the initializer
5546 determines how many elements the flexible array member has. For
5547 C++, fldsize can be smaller, though, if the last few (or even all)
5548 initializers of the flexible array member have side effects and
5549 the FE splits them into dynamic initialization. */
5550 gcc_checking_assert (fieldsize >= fldsize);
5551 /* Given a non-empty initialization, this field had better
5552 be last. Given a flexible array member, the next field
5553 on the chain is a TYPE_DECL of the enclosing struct. */
5554 const_tree next = DECL_CHAIN (local->field);
5555 gcc_assert (!fieldsize || !next || TREE_CODE (next) != FIELD_DECL);
5556 }
5557 else
5558 fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
5559 }
5560 else
5561 fieldsize = int_size_in_bytes (TREE_TYPE (local->type));
5562
5563 /* Output the element's initial value. */
5564 if (local->val == NULL_TREE)
5565 assemble_zeros (fieldsize);
5566 else
5567 fieldsize = output_constant (local->val, fieldsize, align2,
5568 local->reverse, false);
5569
5570 /* Count its size. */
5571 local->total_bytes += fieldsize;
5572 }
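
/* Informal example of the padding done above:

       struct { char c; int i; } v = { 'x', 7 };

   assuming a 4-byte, 4-aligned int, the first call emits the byte 'x';
   the second call computes fieldpos == 4 from int_byte_position while
   total_bytes is still 1, so three zero bytes are emitted before the
   value of I.  */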
5573
5574 /* Helper for output_constructor. From the LOCAL state, output an element
5575 that is a true bitfield or part of an outer one. BIT_OFFSET is the offset
5576 from the start of a possibly ongoing outer byte buffer. */
5577
5578 static void
5579 output_constructor_bitfield (oc_local_state *local, unsigned int bit_offset)
5580 {
5581 /* Bit size of this element. */
5582 HOST_WIDE_INT ebitsize
5583 = (local->field
5584 ? tree_to_uhwi (DECL_SIZE (local->field))
5585 : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));
5586
5587 /* Relative index of this element if this is an array component. */
5588 HOST_WIDE_INT relative_index
5589 = (!local->field
5590 ? (local->index
5591 ? (tree_to_shwi (local->index)
5592 - tree_to_shwi (local->min_index))
5593 : local->last_relative_index + 1)
5594 : 0);
5595
5596 /* Bit position of this element from the start of the containing
5597 constructor. */
5598 HOST_WIDE_INT constructor_relative_ebitpos
5599 = (local->field
5600 ? int_bit_position (local->field)
5601 : ebitsize * relative_index);
5602
5603 /* Bit position of this element from the start of a possibly ongoing
5604 outer byte buffer. */
5605 HOST_WIDE_INT byte_relative_ebitpos
5606 = bit_offset + constructor_relative_ebitpos;
5607
5608 /* From the start of a possibly ongoing outer byte buffer, offsets to
5609 the first bit of this element and to the first bit past the end of
5610 this element. */
5611 HOST_WIDE_INT next_offset = byte_relative_ebitpos;
5612 HOST_WIDE_INT end_offset = byte_relative_ebitpos + ebitsize;
5613
5614 local->last_relative_index = relative_index;
5615
5616 if (local->val == NULL_TREE)
5617 local->val = integer_zero_node;
5618
5619 while (TREE_CODE (local->val) == VIEW_CONVERT_EXPR
5620 || TREE_CODE (local->val) == NON_LVALUE_EXPR)
5621 local->val = TREE_OPERAND (local->val, 0);
5622
5623 if (TREE_CODE (local->val) != INTEGER_CST
5624 && TREE_CODE (local->val) != CONSTRUCTOR)
5625 {
5626 error ("invalid initial value for member %qE", DECL_NAME (local->field));
5627 return;
5628 }
5629
5630 /* If this field does not start in this (or next) byte, skip some bytes. */
5631 if (next_offset / BITS_PER_UNIT != local->total_bytes)
5632 {
5633 /* Output remnant of any bit field in previous bytes. */
5634 if (local->byte_buffer_in_use)
5635 {
5636 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5637 local->total_bytes++;
5638 local->byte_buffer_in_use = false;
5639 }
5640
5641 /* If still not at proper byte, advance to there. */
5642 if (next_offset / BITS_PER_UNIT != local->total_bytes)
5643 {
5644 gcc_assert (next_offset / BITS_PER_UNIT >= local->total_bytes);
5645 assemble_zeros (next_offset / BITS_PER_UNIT - local->total_bytes);
5646 local->total_bytes = next_offset / BITS_PER_UNIT;
5647 }
5648 }
5649
5650 /* Set up the buffer if necessary. */
5651 if (!local->byte_buffer_in_use)
5652 {
5653 local->byte = 0;
5654 if (ebitsize > 0)
5655 local->byte_buffer_in_use = true;
5656 }
5657
5658 /* If this is a nested constructor, recurse passing the bit offset and the
5659 pending data, then retrieve the new pending data afterwards. */
5660 if (TREE_CODE (local->val) == CONSTRUCTOR)
5661 {
5662 oc_outer_state temp_state;
5663 temp_state.bit_offset = next_offset % BITS_PER_UNIT;
5664 temp_state.byte = local->byte;
5665 local->total_bytes
5666 += output_constructor (local->val, 0, 0, local->reverse, &temp_state);
5667 local->byte = temp_state.byte;
5668 return;
5669 }
5670
5671 /* Otherwise, we must split the element into pieces that fall within
5672 separate bytes, and combine each byte with previous or following
5673 bit-fields. */
5674 while (next_offset < end_offset)
5675 {
5676 int this_time;
5677 int shift;
5678 unsigned HOST_WIDE_INT value;
5679 HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
5680 HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;
5681
5682 /* Advance from byte to byte within this element when necessary. */
5683 while (next_byte != local->total_bytes)
5684 {
5685 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5686 local->total_bytes++;
5687 local->byte = 0;
5688 }
5689
5690 /* Number of bits we can process at once (all part of the same byte). */
5691 this_time = MIN (end_offset - next_offset, BITS_PER_UNIT - next_bit);
5692 if (local->reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5693 {
5694 /* For big-endian data, take the most significant bits (of the
5695 bits that are significant) first and put them into bytes from
5696 the most significant end. */
5697 shift = end_offset - next_offset - this_time;
5698
5699 /* Don't try to take a bunch of bits that cross
5700 the word boundary in the INTEGER_CST. We can
5701 only select bits from one element. */
5702 if ((shift / HOST_BITS_PER_WIDE_INT)
5703 != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
5704 {
5705 const int end = shift + this_time - 1;
5706 shift = end & -HOST_BITS_PER_WIDE_INT;
5707 this_time = end - shift + 1;
5708 }
5709
5710 /* Now get the bits we want to insert. */
5711 value = wi::extract_uhwi (wi::to_widest (local->val),
5712 shift, this_time);
5713
5714 /* Get the result. This works only when:
5715 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
5716 local->byte |= value << (BITS_PER_UNIT - this_time - next_bit);
5717 }
5718 else
5719 {
5720 /* On little-endian machines, take the least significant bits of
5721 the value first and pack them starting at the least significant
5722 bits of the bytes. */
5723 shift = next_offset - byte_relative_ebitpos;
5724
5725 /* Don't try to take a bunch of bits that cross
5726 the word boundary in the INTEGER_CST. We can
5727 only select bits from one element. */
5728 if ((shift / HOST_BITS_PER_WIDE_INT)
5729 != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
5730 this_time
5731 = HOST_BITS_PER_WIDE_INT - (shift & (HOST_BITS_PER_WIDE_INT - 1));
5732
5733 /* Now get the bits we want to insert. */
5734 value = wi::extract_uhwi (wi::to_widest (local->val),
5735 shift, this_time);
5736
5737 /* Get the result. This works only when:
5738 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
5739 local->byte |= value << next_bit;
5740 }
5741
5742 next_offset += this_time;
5743 local->byte_buffer_in_use = true;
5744 }
5745 }
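
/* Informal little-endian example of the byte packing above:

       struct { unsigned a : 3, b : 6; } v = { 5, 9 };

   A occupies bits 0..2 and B bits 3..8.  The first call puts 5 into the
   low bits of the byte buffer; the second call emits B in two pieces:
   its low 5 bits complete the first byte, which is then flushed, and its
   remaining bit lands in the low bit of the following byte.  */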
5746
5747 /* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
5748 Generate at least SIZE bytes, padding if necessary. OUTER designates the
5749 caller output state of relevance in recursive invocations. */
5750
5751 static unsigned HOST_WIDE_INT
5752 output_constructor (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
5753 bool reverse, oc_outer_state *outer)
5754 {
5755 unsigned HOST_WIDE_INT cnt;
5756 constructor_elt *ce;
5757 oc_local_state local;
5758
5759 /* Setup our local state to communicate with helpers. */
5760 local.exp = exp;
5761 local.type = TREE_TYPE (exp);
5762 local.size = size;
5763 local.align = align;
5764 if (TREE_CODE (local.type) == ARRAY_TYPE && TYPE_DOMAIN (local.type))
5765 local.min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (local.type));
5766 else
5767 local.min_index = integer_zero_node;
5768
5769 local.total_bytes = 0;
5770 local.byte_buffer_in_use = outer != NULL;
5771 local.byte = outer ? outer->byte : 0;
5772 local.last_relative_index = -1;
5773 /* The storage order is specified for every aggregate type. */
5774 if (AGGREGATE_TYPE_P (local.type))
5775 local.reverse = TYPE_REVERSE_STORAGE_ORDER (local.type);
5776 else
5777 local.reverse = reverse;
5778
5779 gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT);
5780
5781 /* As CE goes through the elements of the constant, FIELD goes through the
5782 structure fields if the constant is a structure. If the constant is a
5783 union, we override this by getting the field from the TREE_LIST element.
5784 But the constant could also be an array. Then FIELD is zero.
5785
5786 There is always a maximum of one element in the chain LINK for unions
5787 (even if the initializer in a source program incorrectly contains
5788 more than one). */
5789
5790 if (TREE_CODE (local.type) == RECORD_TYPE)
5791 local.field = TYPE_FIELDS (local.type);
5792 else
5793 local.field = NULL_TREE;
5794
5795 for (cnt = 0;
5796 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), cnt, &ce);
5797 cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
5798 {
5799 local.val = ce->value;
5800 local.index = NULL_TREE;
5801
5802 /* The element in a union constructor specifies the proper field
5803 or index. */
5804 if (RECORD_OR_UNION_TYPE_P (local.type) && ce->index != NULL_TREE)
5805 local.field = ce->index;
5806
5807 else if (TREE_CODE (local.type) == ARRAY_TYPE)
5808 local.index = ce->index;
5809
5810 if (local.field && flag_verbose_asm)
5811 fprintf (asm_out_file, "%s %s:\n",
5812 ASM_COMMENT_START,
5813 DECL_NAME (local.field)
5814 ? IDENTIFIER_POINTER (DECL_NAME (local.field))
5815 : "<anonymous>");
5816
5817 /* Eliminate the marker that makes a cast not be an lvalue. */
5818 if (local.val != NULL_TREE)
5819 STRIP_NOPS (local.val);
5820
5821 /* Output the current element, using the appropriate helper ... */
5822
5823 /* For an array slice not part of an outer bitfield. */
5824 if (!outer
5825 && local.index != NULL_TREE
5826 && TREE_CODE (local.index) == RANGE_EXPR)
5827 output_constructor_array_range (&local);
5828
5829 /* For a field that is neither a true bitfield nor part of an outer one,
5830 known to be at least byte aligned and multiple-of-bytes long. */
5831 else if (!outer
5832 && (local.field == NULL_TREE
5833 || !CONSTRUCTOR_BITFIELD_P (local.field)))
5834 output_constructor_regular_field (&local);
5835
5836 /* For a true bitfield or part of an outer one. Only INTEGER_CSTs are
5837 supported for scalar fields, so we may need to convert first. */
5838 else
5839 {
5840 if (TREE_CODE (local.val) == REAL_CST)
5841 local.val
5842 = fold_unary (VIEW_CONVERT_EXPR,
5843 build_nonstandard_integer_type
5844 (TYPE_PRECISION (TREE_TYPE (local.val)), 0),
5845 local.val);
5846 output_constructor_bitfield (&local, outer ? outer->bit_offset : 0);
5847 }
5848 }
5849
5850 /* If we are not at toplevel, save the pending data for our caller.
5851 Otherwise output the pending data and padding zeros as needed. */
5852 if (outer)
5853 outer->byte = local.byte;
5854 else
5855 {
5856 if (local.byte_buffer_in_use)
5857 {
5858 assemble_integer (GEN_INT (local.byte), 1, BITS_PER_UNIT, 1);
5859 local.total_bytes++;
5860 }
5861
5862 if ((unsigned HOST_WIDE_INT)local.total_bytes < local.size)
5863 {
5864 assemble_zeros (local.size - local.total_bytes);
5865 local.total_bytes = local.size;
5866 }
5867 }
5868
5869 return local.total_bytes;
5870 }
5871
5872 /* Mark DECL as weak. */
5873
5874 static void
5875 mark_weak (tree decl)
5876 {
5877 if (DECL_WEAK (decl))
5878 return;
5879
5880 struct symtab_node *n = symtab_node::get (decl);
5881 if (n && n->refuse_visibility_changes)
5882 error ("%qD declared weak after being used", decl);
5883 DECL_WEAK (decl) = 1;
5884
5885 if (DECL_RTL_SET_P (decl)
5886 && MEM_P (DECL_RTL (decl))
5887 && XEXP (DECL_RTL (decl), 0)
5888 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == SYMBOL_REF)
5889 SYMBOL_REF_WEAK (XEXP (DECL_RTL (decl), 0)) = 1;
5890 }
5891
5892 /* Merge weak status between NEWDECL and OLDDECL. */
5893
5894 void
5895 merge_weak (tree newdecl, tree olddecl)
5896 {
5897 if (DECL_WEAK (newdecl) == DECL_WEAK (olddecl))
5898 {
5899 if (DECL_WEAK (newdecl) && TARGET_SUPPORTS_WEAK)
5900 {
5901 tree *pwd;
5902 /* We put the NEWDECL on the weak_decls list at some point
5903 and OLDDECL as well. Keep just OLDDECL on the list. */
5904 for (pwd = &weak_decls; *pwd; pwd = &TREE_CHAIN (*pwd))
5905 if (TREE_VALUE (*pwd) == newdecl)
5906 {
5907 *pwd = TREE_CHAIN (*pwd);
5908 break;
5909 }
5910 }
5911 return;
5912 }
5913
5914 if (DECL_WEAK (newdecl))
5915 {
5916 tree wd;
5917
5918 /* NEWDECL is weak, but OLDDECL is not. */
5919
5920 /* If we already output the OLDDECL, we're in trouble; we can't
5921 go back and make it weak. This should never happen in
5922 unit-at-a-time compilation. */
5923 gcc_assert (!TREE_ASM_WRITTEN (olddecl));
5924
5925 /* If we've already generated rtl referencing OLDDECL, we may
5926 have done so in a way that will not function properly with
5927 a weak symbol. Again in unit-at-a-time this should be
5928 impossible. */
5929 gcc_assert (!TREE_USED (olddecl)
5930 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (olddecl)));
5931
5932 /* PR 49899: You cannot convert a static function into a weak, public function. */
5933 if (! TREE_PUBLIC (olddecl) && TREE_PUBLIC (newdecl))
5934 error ("weak declaration of %q+D being applied to a already "
5935 "existing, static definition", newdecl);
5936
5937 if (TARGET_SUPPORTS_WEAK)
5938 {
5939 /* We put the NEWDECL on the weak_decls list at some point.
5940 Replace it with the OLDDECL. */
5941 for (wd = weak_decls; wd; wd = TREE_CHAIN (wd))
5942 if (TREE_VALUE (wd) == newdecl)
5943 {
5944 TREE_VALUE (wd) = olddecl;
5945 break;
5946 }
5947 /* We may not find the entry on the list. If NEWDECL is a
5948 weak alias, then we will have already called
5949 globalize_decl to remove the entry; in that case, we do
5950 not need to do anything. */
5951 }
5952
5953 /* Make the OLDDECL weak; it's OLDDECL that we'll be keeping. */
5954 mark_weak (olddecl);
5955 }
5956 else
5957 /* OLDDECL was weak, but NEWDECL was not explicitly marked as
5958 weak. Just update NEWDECL to indicate that it's weak too. */
5959 mark_weak (newdecl);
5960 }
5961
5962 /* Declare DECL to be a weak symbol. */
5963
5964 void
5965 declare_weak (tree decl)
5966 {
5967 /* With -fsyntax-only, TREE_ASM_WRITTEN might be set on certain function
5968 decls earlier than normally, but as with -fsyntax-only nothing is really
5969 emitted, there is no harm in marking it weak later. */
5970 gcc_assert (TREE_CODE (decl) != FUNCTION_DECL
5971 || !TREE_ASM_WRITTEN (decl)
5972 || flag_syntax_only);
5973 if (! TREE_PUBLIC (decl))
5974 {
5975 error ("weak declaration of %q+D must be public", decl);
5976 return;
5977 }
5978 else if (!TARGET_SUPPORTS_WEAK)
5979 warning (0, "weak declaration of %q+D not supported", decl);
5980
5981 mark_weak (decl);
5982 if (!lookup_attribute ("weak", DECL_ATTRIBUTES (decl)))
5983 DECL_ATTRIBUTES (decl)
5984 = tree_cons (get_identifier ("weak"), NULL, DECL_ATTRIBUTES (decl));
5985 }
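
/* For reference, a typical source-level trigger is

       extern void hook (void) __attribute__ ((weak));

   which ends up here via the front end; on ELF-style targets the later
   weak handling then emits something like ".weak hook", so the symbol
   may legitimately stay undefined at link time.  */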
5986
5987 static void
5988 weak_finish_1 (tree decl)
5989 {
5990 #if defined (ASM_WEAKEN_DECL) || defined (ASM_WEAKEN_LABEL)
5991 const char *const name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5992 #endif
5993
5994 if (! TREE_USED (decl))
5995 return;
5996
5997 #ifdef ASM_WEAKEN_DECL
5998 ASM_WEAKEN_DECL (asm_out_file, decl, name, NULL);
5999 #else
6000 #ifdef ASM_WEAKEN_LABEL
6001 ASM_WEAKEN_LABEL (asm_out_file, name);
6002 #else
6003 #ifdef ASM_OUTPUT_WEAK_ALIAS
6004 {
6005 static bool warn_once = 0;
6006 if (! warn_once)
6007 {
6008 warning (0, "only weak aliases are supported in this configuration");
6009 warn_once = 1;
6010 }
6011 return;
6012 }
6013 #endif
6014 #endif
6015 #endif
6016 }
6017
6018 /* Given an assembly name, find the decl it is associated with. */
6019 static tree
6020 find_decl (tree target)
6021 {
6022 symtab_node *node = symtab_node::get_for_asmname (target);
6023 if (node)
6024 return node->decl;
6025 return NULL_TREE;
6026 }
6027
6028 /* This TREE_LIST contains weakref targets. */
6029
6030 static GTY(()) tree weakref_targets;
6031
6032 /* Emit any pending weak declarations. */
6033
6034 void
6035 weak_finish (void)
6036 {
6037 tree t;
6038
6039 for (t = weakref_targets; t; t = TREE_CHAIN (t))
6040 {
6041 tree alias_decl = TREE_PURPOSE (t);
6042 tree target = ultimate_transparent_alias_target (&TREE_VALUE (t));
6043
6044 if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl))
6045 || TREE_SYMBOL_REFERENCED (target))
6046 /* Remove alias_decl from the weak list, but leave entries for
6047 the target alone. */
6048 target = NULL_TREE;
6049 #ifndef ASM_OUTPUT_WEAKREF
6050 else if (! TREE_SYMBOL_REFERENCED (target))
6051 {
6052 /* Use ASM_WEAKEN_LABEL only if ASM_WEAKEN_DECL is not
6053 defined, otherwise we and weak_finish_1 would use
6054 different macros. */
6055 # if defined ASM_WEAKEN_LABEL && ! defined ASM_WEAKEN_DECL
6056 ASM_WEAKEN_LABEL (asm_out_file, IDENTIFIER_POINTER (target));
6057 # else
6058 tree decl = find_decl (target);
6059
6060 if (! decl)
6061 {
6062 decl = build_decl (DECL_SOURCE_LOCATION (alias_decl),
6063 TREE_CODE (alias_decl), target,
6064 TREE_TYPE (alias_decl));
6065
6066 DECL_EXTERNAL (decl) = 1;
6067 TREE_PUBLIC (decl) = 1;
6068 DECL_ARTIFICIAL (decl) = 1;
6069 TREE_NOTHROW (decl) = TREE_NOTHROW (alias_decl);
6070 TREE_USED (decl) = 1;
6071 }
6072
6073 weak_finish_1 (decl);
6074 # endif
6075 }
6076 #endif
6077
6078 {
6079 tree *p;
6080 tree t2;
6081
6082 /* Remove the alias and the target from the pending weak list
6083 so that we do not emit any .weak directives for the former,
6084 nor multiple .weak directives for the latter. */
6085 for (p = &weak_decls; (t2 = *p) ; )
6086 {
6087 if (TREE_VALUE (t2) == alias_decl
6088 || target == DECL_ASSEMBLER_NAME (TREE_VALUE (t2)))
6089 *p = TREE_CHAIN (t2);
6090 else
6091 p = &TREE_CHAIN (t2);
6092 }
6093
6094 /* Remove other weakrefs to the same target, to speed things up. */
6095 for (p = &TREE_CHAIN (t); (t2 = *p) ; )
6096 {
6097 if (target == ultimate_transparent_alias_target (&TREE_VALUE (t2)))
6098 *p = TREE_CHAIN (t2);
6099 else
6100 p = &TREE_CHAIN (t2);
6101 }
6102 }
6103 }
6104
6105 for (t = weak_decls; t; t = TREE_CHAIN (t))
6106 {
6107 tree decl = TREE_VALUE (t);
6108
6109 weak_finish_1 (decl);
6110 }
6111 }
6112
6113 /* Emit the assembly bits to indicate that DECL is globally visible. */
6114
6115 static void
6116 globalize_decl (tree decl)
6117 {
6118
6119 #if defined (ASM_WEAKEN_LABEL) || defined (ASM_WEAKEN_DECL)
6120 if (DECL_WEAK (decl))
6121 {
6122 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
6123 tree *p, t;
6124
6125 #ifdef ASM_WEAKEN_DECL
6126 ASM_WEAKEN_DECL (asm_out_file, decl, name, 0);
6127 #else
6128 ASM_WEAKEN_LABEL (asm_out_file, name);
6129 #endif
6130
6131 /* Remove this function from the pending weak list so that
6132 we do not emit multiple .weak directives for it. */
6133 for (p = &weak_decls; (t = *p) ; )
6134 {
6135 if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
6136 *p = TREE_CHAIN (t);
6137 else
6138 p = &TREE_CHAIN (t);
6139 }
6140
6141 /* Remove weakrefs to the same target from the pending weakref
6142 list, for the same reason. */
6143 for (p = &weakref_targets; (t = *p) ; )
6144 {
6145 if (DECL_ASSEMBLER_NAME (decl)
6146 == ultimate_transparent_alias_target (&TREE_VALUE (t)))
6147 *p = TREE_CHAIN (t);
6148 else
6149 p = &TREE_CHAIN (t);
6150 }
6151
6152 return;
6153 }
6154 #endif
6155
6156 targetm.asm_out.globalize_decl_name (asm_out_file, decl);
6157 }
6158
6159 vec<alias_pair, va_gc> *alias_pairs;
6160
6161 /* Output the assembler code for a define (equate) using ASM_OUTPUT_DEF
6162 or ASM_OUTPUT_DEF_FROM_DECLS. The function defines the symbol whose
6163 tree node is DECL to have the value of the tree node TARGET. */
6164
6165 void
6166 do_assemble_alias (tree decl, tree target)
6167 {
6168 tree id;
6169
6170 /* Emulated TLS had better not get this var. */
6171 gcc_assert (!(!targetm.have_tls
6172 && VAR_P (decl)
6173 && DECL_THREAD_LOCAL_P (decl)));
6174
6175 if (TREE_ASM_WRITTEN (decl))
6176 return;
6177
6178 id = DECL_ASSEMBLER_NAME (decl);
6179 ultimate_transparent_alias_target (&id);
6180 ultimate_transparent_alias_target (&target);
6181
6182 /* We must force creation of DECL_RTL for debug info generation, even though
6183 we don't use it here. */
6184 make_decl_rtl (decl);
6185
6186 TREE_ASM_WRITTEN (decl) = 1;
6187 TREE_ASM_WRITTEN (DECL_ASSEMBLER_NAME (decl)) = 1;
6188 TREE_ASM_WRITTEN (id) = 1;
6189
6190 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
6191 {
6192 if (!TREE_SYMBOL_REFERENCED (target))
6193 weakref_targets = tree_cons (decl, target, weakref_targets);
6194
6195 #ifdef ASM_OUTPUT_WEAKREF
6196 ASM_OUTPUT_WEAKREF (asm_out_file, decl,
6197 IDENTIFIER_POINTER (id),
6198 IDENTIFIER_POINTER (target));
6199 #else
6200 if (!TARGET_SUPPORTS_WEAK)
6201 {
6202 error_at (DECL_SOURCE_LOCATION (decl),
6203 "%qs is not supported in this configuration", "weakref ");
6204 return;
6205 }
6206 #endif
6207 return;
6208 }
6209
6210 #ifdef ASM_OUTPUT_DEF
6211 tree orig_decl = decl;
6212
6213 /* Make name accessible from other files, if appropriate. */
6214
6215 if (TREE_PUBLIC (decl) || TREE_PUBLIC (orig_decl))
6216 {
6217 globalize_decl (decl);
6218 maybe_assemble_visibility (decl);
6219 }
6220 if (TREE_CODE (decl) == FUNCTION_DECL
6221 && cgraph_node::get (decl)->ifunc_resolver)
6222 {
6223 #if defined (ASM_OUTPUT_TYPE_DIRECTIVE)
6224 if (targetm.has_ifunc_p ())
6225 ASM_OUTPUT_TYPE_DIRECTIVE
6226 (asm_out_file, IDENTIFIER_POINTER (id),
6227 IFUNC_ASM_TYPE);
6228 else
6229 #endif
6230 error_at (DECL_SOURCE_LOCATION (decl),
6231 "%qs is not supported on this target", "ifunc");
6232 }
6233
6234 # ifdef ASM_OUTPUT_DEF_FROM_DECLS
6235 ASM_OUTPUT_DEF_FROM_DECLS (asm_out_file, decl, target);
6236 # else
6237 ASM_OUTPUT_DEF (asm_out_file,
6238 IDENTIFIER_POINTER (id),
6239 IDENTIFIER_POINTER (target));
6240 # endif
6241 #elif defined (ASM_OUTPUT_WEAK_ALIAS) || defined (ASM_WEAKEN_DECL)
6242 {
6243 const char *name;
6244 tree *p, t;
6245
6246 name = IDENTIFIER_POINTER (id);
6247 # ifdef ASM_WEAKEN_DECL
6248 ASM_WEAKEN_DECL (asm_out_file, decl, name, IDENTIFIER_POINTER (target));
6249 # else
6250 ASM_OUTPUT_WEAK_ALIAS (asm_out_file, name, IDENTIFIER_POINTER (target));
6251 # endif
6252 /* Remove this function from the pending weak list so that
6253 we do not emit multiple .weak directives for it. */
6254 for (p = &weak_decls; (t = *p) ; )
6255 if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t))
6256 || id == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
6257 *p = TREE_CHAIN (t);
6258 else
6259 p = &TREE_CHAIN (t);
6260
6261 /* Remove weakrefs to the same target from the pending weakref
6262 list, for the same reason. */
6263 for (p = &weakref_targets; (t = *p) ; )
6264 {
6265 if (id == ultimate_transparent_alias_target (&TREE_VALUE (t)))
6266 *p = TREE_CHAIN (t);
6267 else
6268 p = &TREE_CHAIN (t);
6269 }
6270 }
6271 #endif
6272 }
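
/* Roughly, on an ELF target with ASM_OUTPUT_DEF, something like

       int real_fn (void) { return 0; }
       int alias_fn (void) __attribute__ ((alias ("real_fn")));

   leads here and produces directives along the lines of

       .globl  alias_fn
       .set    alias_fn, real_fn

   whereas a "weakref" attribute takes the ASM_OUTPUT_WEAKREF branch
   above instead.  */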
6273
6274 /* Output .symver directive. */
6275
6276 void
6277 do_assemble_symver (tree decl, tree target)
6278 {
6279 tree id = DECL_ASSEMBLER_NAME (decl);
6280 ultimate_transparent_alias_target (&id);
6281 ultimate_transparent_alias_target (&target);
6282 #ifdef ASM_OUTPUT_SYMVER_DIRECTIVE
6283 ASM_OUTPUT_SYMVER_DIRECTIVE (asm_out_file,
6284 IDENTIFIER_POINTER (target),
6285 IDENTIFIER_POINTER (id));
6286 #else
6287 error ("symver is only supported on ELF platforms");
6288 #endif
6289 }
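
/* Rough example (ELF symbol versioning):

       __attribute__ ((symver ("foo@VERS_1")))
       int foo_v1 (void) { return 1; }

   makes the front end record the version and this function print
   something like ".symver foo_v1, foo@VERS_1" through
   ASM_OUTPUT_SYMVER_DIRECTIVE.  */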
6290
6291 /* Emit an assembler directive to make the symbol for DECL an alias to
6292 the symbol for TARGET. */
6293
6294 void
6295 assemble_alias (tree decl, tree target)
6296 {
6297 tree target_decl;
6298
6299 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
6300 {
6301 tree alias = DECL_ASSEMBLER_NAME (decl);
6302
6303 ultimate_transparent_alias_target (&target);
6304
6305 if (alias == target)
6306 error ("%qs symbol %q+D ultimately targets itself", "weakref", decl);
6307 if (TREE_PUBLIC (decl))
6308 error ("%qs symbol %q+D must have static linkage", "weakref", decl);
6309 }
6310 else
6311 {
6312 #if !defined (ASM_OUTPUT_DEF)
6313 # if !defined(ASM_OUTPUT_WEAK_ALIAS) && !defined (ASM_WEAKEN_DECL)
6314 error_at (DECL_SOURCE_LOCATION (decl),
6315 "alias definitions not supported in this configuration");
6316 TREE_ASM_WRITTEN (decl) = 1;
6317 return;
6318 # else
6319 if (!DECL_WEAK (decl))
6320 {
6321 /* NB: ifunc_resolver isn't set when an error is detected. */
6322 if (TREE_CODE (decl) == FUNCTION_DECL
6323 && lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
6324 error_at (DECL_SOURCE_LOCATION (decl),
6325 "%qs is not supported in this configuration", "ifunc");
6326 else
6327 error_at (DECL_SOURCE_LOCATION (decl),
6328 "only weak aliases are supported in this configuration");
6329 TREE_ASM_WRITTEN (decl) = 1;
6330 return;
6331 }
6332 # endif
6333 #endif
6334 }
6335 TREE_USED (decl) = 1;
6336
6337 /* Allow aliases to aliases. */
6338 if (TREE_CODE (decl) == FUNCTION_DECL)
6339 cgraph_node::get_create (decl)->alias = true;
6340 else
6341 varpool_node::get_create (decl)->alias = true;
6342
6343 /* If the target has already been emitted, we don't have to queue the
6344 alias. This saves a tad of memory. */
6345 if (symtab->global_info_ready)
6346 target_decl = find_decl (target);
6347 else
6348 target_decl = NULL;
6349 if ((target_decl && TREE_ASM_WRITTEN (target_decl))
6350 || symtab->state >= EXPANSION)
6351 do_assemble_alias (decl, target);
6352 else
6353 {
6354 alias_pair p = {decl, target};
6355 vec_safe_push (alias_pairs, p);
6356 }
6357 }
6358
6359 /* Record and output a table of translations from original function
6360 to its transaction aware clone. Note that tm_pure functions are
6361 considered to be their own clone. */
6362
6363 struct tm_clone_hasher : ggc_cache_ptr_hash<tree_map>
6364 {
6365 static hashval_t hash (tree_map *m) { return tree_map_hash (m); }
6366 static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); }
6367
6368 static int
6369 keep_cache_entry (tree_map *&e)
6370 {
6371 return ggc_marked_p (e->base.from);
6372 }
6373 };
6374
6375 static GTY((cache)) hash_table<tm_clone_hasher> *tm_clone_hash;
6376
6377 void
6378 record_tm_clone_pair (tree o, tree n)
6379 {
6380 struct tree_map **slot, *h;
6381
6382 if (tm_clone_hash == NULL)
6383 tm_clone_hash = hash_table<tm_clone_hasher>::create_ggc (32);
6384
6385 h = ggc_alloc<tree_map> ();
6386 h->hash = htab_hash_pointer (o);
6387 h->base.from = o;
6388 h->to = n;
6389
6390 slot = tm_clone_hash->find_slot_with_hash (h, h->hash, INSERT);
6391 *slot = h;
6392 }
6393
6394 tree
6395 get_tm_clone_pair (tree o)
6396 {
6397 if (tm_clone_hash)
6398 {
6399 struct tree_map *h, in;
6400
6401 in.base.from = o;
6402 in.hash = htab_hash_pointer (o);
6403 h = tm_clone_hash->find_with_hash (&in, in.hash);
6404 if (h)
6405 return h->to;
6406 }
6407 return NULL_TREE;
6408 }
6409
6410 struct tm_alias_pair
6411 {
6412 unsigned int uid;
6413 tree from;
6414 tree to;
6415 };
6416
6417
6418 /* Dump the actual pairs to the .tm_clone_table section. */
6419
6420 static void
6421 dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
6422 {
6423 unsigned i;
6424 tm_alias_pair *p;
6425 bool switched = false;
6426
6427 FOR_EACH_VEC_ELT (tm_alias_pairs, i, p)
6428 {
6429 tree src = p->from;
6430 tree dst = p->to;
6431 struct cgraph_node *src_n = cgraph_node::get (src);
6432 struct cgraph_node *dst_n = cgraph_node::get (dst);
6433
6434 /* The function ipa_tm_create_version() marks the clone as needed if
6435 the original function was needed. But we also mark the clone as
6436 needed if we ever called the clone indirectly through
6437 TM_GETTMCLONE. If neither of these are true, we didn't generate
6438 a clone, and we didn't call it indirectly... no sense keeping it
6439 in the clone table. */
6440 if (!dst_n || !dst_n->definition)
6441 continue;
6442
6443 /* This covers the case where we have optimized the original
6444 function away, and only access the transactional clone. */
6445 if (!src_n || !src_n->definition)
6446 continue;
6447
6448 if (!switched)
6449 {
6450 switch_to_section (targetm.asm_out.tm_clone_table_section ());
6451 assemble_align (POINTER_SIZE);
6452 switched = true;
6453 }
6454
6455 assemble_integer (XEXP (DECL_RTL (src), 0),
6456 POINTER_SIZE_UNITS, POINTER_SIZE, 1);
6457 assemble_integer (XEXP (DECL_RTL (dst), 0),
6458 POINTER_SIZE_UNITS, POINTER_SIZE, 1);
6459 }
6460 }
6461
6462 /* Provide a default for the tm_clone_table section. */
6463
6464 section *
6465 default_clone_table_section (void)
6466 {
6467 return get_named_section (NULL, ".tm_clone_table", 3);
6468 }
6469
6470 /* Helper comparison function for qsorting by the DECL_UID stored in
6471 the tm_alias_pair. */
6472
6473 static int
6474 tm_alias_pair_cmp (const void *x, const void *y)
6475 {
6476 const tm_alias_pair *p1 = (const tm_alias_pair *) x;
6477 const tm_alias_pair *p2 = (const tm_alias_pair *) y;
6478 if (p1->uid < p2->uid)
6479 return -1;
6480 if (p1->uid > p2->uid)
6481 return 1;
6482 return 0;
6483 }
6484
6485 void
6486 finish_tm_clone_pairs (void)
6487 {
6488 vec<tm_alias_pair> tm_alias_pairs = vNULL;
6489
6490 if (tm_clone_hash == NULL)
6491 return;
6492
6493 /* We need a deterministic order for the .tm_clone_table, otherwise
6494 we will get bootstrap comparison failures, so dump the hash table
6495 to a vector, sort it, and dump the vector. */
6496
6497 /* Dump the hashtable to a vector. */
6498 tree_map *map;
6499 hash_table<tm_clone_hasher>::iterator iter;
6500 FOR_EACH_HASH_TABLE_ELEMENT (*tm_clone_hash, map, tree_map *, iter)
6501 {
6502 tm_alias_pair p = {DECL_UID (map->base.from), map->base.from, map->to};
6503 tm_alias_pairs.safe_push (p);
6504 }
6505 /* Sort it. */
6506 tm_alias_pairs.qsort (tm_alias_pair_cmp);
6507
6508 /* Dump it. */
6509 dump_tm_clone_pairs (tm_alias_pairs);
6510
6511 tm_clone_hash->empty ();
6512 tm_clone_hash = NULL;
6513 tm_alias_pairs.release ();
6514 }
6515
6516
6517 /* Emit an assembler directive to set symbol for DECL visibility to
6518 the visibility type VIS, which must not be VISIBILITY_DEFAULT. */
6519
6520 void
6521 default_assemble_visibility (tree decl ATTRIBUTE_UNUSED,
6522 int vis ATTRIBUTE_UNUSED)
6523 {
6524 #ifdef HAVE_GAS_HIDDEN
6525 static const char * const visibility_types[] = {
6526 NULL, "protected", "hidden", "internal"
6527 };
6528
6529 const char *name, *type;
6530 tree id;
6531
6532 id = DECL_ASSEMBLER_NAME (decl);
6533 ultimate_transparent_alias_target (&id);
6534 name = IDENTIFIER_POINTER (id);
6535
6536 type = visibility_types[vis];
6537
6538 fprintf (asm_out_file, "\t.%s\t", type);
6539 assemble_name (asm_out_file, name);
6540 fprintf (asm_out_file, "\n");
6541 #else
6542 if (!DECL_ARTIFICIAL (decl))
6543 warning (OPT_Wattributes, "visibility attribute not supported "
6544 "in this configuration; ignored");
6545 #endif
6546 }
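
/* For example, with GAS hidden-visibility support,

       __attribute__ ((visibility ("hidden"))) int h;

   results in a line of the form ".hidden h" being printed for the
   symbol.  */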
6547
6548 /* A helper function to call assemble_visibility when needed for a decl. */
6549
6550 int
6551 maybe_assemble_visibility (tree decl)
6552 {
6553 enum symbol_visibility vis = DECL_VISIBILITY (decl);
6554 if (vis != VISIBILITY_DEFAULT)
6555 {
6556 targetm.asm_out.assemble_visibility (decl, vis);
6557 return 1;
6558 }
6559 else
6560 return 0;
6561 }
6562
6563 /* Returns 1 if the target configuration supports defining public symbols
6564 so that one of them will be chosen at link time instead of generating a
6565 multiply-defined symbol error, whether through the use of weak symbols or
6566 a target-specific mechanism for having duplicates discarded. */
6567
6568 int
6569 supports_one_only (void)
6570 {
6571 if (SUPPORTS_ONE_ONLY)
6572 return 1;
6573 return TARGET_SUPPORTS_WEAK;
6574 }
6575
6576 /* Set up DECL as a public symbol that can be defined in multiple
6577 translation units without generating a linker error. */
6578
6579 void
6580 make_decl_one_only (tree decl, tree comdat_group)
6581 {
6582 struct symtab_node *symbol;
6583 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6584
6585 TREE_PUBLIC (decl) = 1;
6586
6587 if (VAR_P (decl))
6588 symbol = varpool_node::get_create (decl);
6589 else
6590 symbol = cgraph_node::get_create (decl);
6591
6592 if (SUPPORTS_ONE_ONLY)
6593 {
6594 #ifdef MAKE_DECL_ONE_ONLY
6595 MAKE_DECL_ONE_ONLY (decl);
6596 #endif
6597 symbol->set_comdat_group (comdat_group);
6598 }
6599 else if (VAR_P (decl)
6600 && (DECL_INITIAL (decl) == 0
6601 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
6602 DECL_COMMON (decl) = 1;
6603 else
6604 {
6605 gcc_assert (TARGET_SUPPORTS_WEAK);
6606 DECL_WEAK (decl) = 1;
6607 }
6608 }
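
/* Informally: entities that may legitimately be defined in several
   translation units, e.g. a C++ inline function or template
   instantiation that must be kept, are funneled through here.  With
   SUPPORTS_ONE_ONLY the decl gets a COMDAT group (see the ",comdat"
   handling in default_elf_asm_named_section), uninitialized variables
   may fall back to a common symbol, and otherwise the symbol is simply
   made weak.  */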
6609
6610 void
6611 init_varasm_once (void)
6612 {
6613 section_htab = hash_table<section_hasher>::create_ggc (31);
6614 object_block_htab = hash_table<object_block_hasher>::create_ggc (31);
6615 const_desc_htab = hash_table<tree_descriptor_hasher>::create_ggc (1009);
6616
6617 shared_constant_pool = create_constant_pool ();
6618
6619 #ifdef TEXT_SECTION_ASM_OP
6620 text_section = get_unnamed_section (SECTION_CODE, output_section_asm_op,
6621 TEXT_SECTION_ASM_OP);
6622 #endif
6623
6624 #ifdef DATA_SECTION_ASM_OP
6625 data_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
6626 DATA_SECTION_ASM_OP);
6627 #endif
6628
6629 #ifdef SDATA_SECTION_ASM_OP
6630 sdata_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
6631 SDATA_SECTION_ASM_OP);
6632 #endif
6633
6634 #ifdef READONLY_DATA_SECTION_ASM_OP
6635 readonly_data_section = get_unnamed_section (0, output_section_asm_op,
6636 READONLY_DATA_SECTION_ASM_OP);
6637 #endif
6638
6639 #ifdef CTORS_SECTION_ASM_OP
6640 ctors_section = get_unnamed_section (0, output_section_asm_op,
6641 CTORS_SECTION_ASM_OP);
6642 #endif
6643
6644 #ifdef DTORS_SECTION_ASM_OP
6645 dtors_section = get_unnamed_section (0, output_section_asm_op,
6646 DTORS_SECTION_ASM_OP);
6647 #endif
6648
6649 #ifdef BSS_SECTION_ASM_OP
6650 bss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
6651 output_section_asm_op,
6652 BSS_SECTION_ASM_OP);
6653 #endif
6654
6655 #ifdef SBSS_SECTION_ASM_OP
6656 sbss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
6657 output_section_asm_op,
6658 SBSS_SECTION_ASM_OP);
6659 #endif
6660
6661 tls_comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6662 | SECTION_COMMON, emit_tls_common);
6663 lcomm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6664 | SECTION_COMMON, emit_local);
6665 comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6666 | SECTION_COMMON, emit_common);
6667
6668 #if defined ASM_OUTPUT_ALIGNED_BSS
6669 bss_noswitch_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS,
6670 emit_bss);
6671 #endif
6672
6673 targetm.asm_out.init_sections ();
6674
6675 if (readonly_data_section == NULL)
6676 readonly_data_section = text_section;
6677
6678 #ifdef ASM_OUTPUT_EXTERNAL
6679 pending_assemble_externals_set = new hash_set<tree>;
6680 #endif
6681 }
6682
6683 enum tls_model
6684 decl_default_tls_model (const_tree decl)
6685 {
6686 enum tls_model kind;
6687 bool is_local;
6688
6689 is_local = targetm.binds_local_p (decl);
6690 if (!flag_shlib)
6691 {
6692 if (is_local)
6693 kind = TLS_MODEL_LOCAL_EXEC;
6694 else
6695 kind = TLS_MODEL_INITIAL_EXEC;
6696 }
6697
6698 /* Local dynamic is inefficient when we're not combining the
6699 parts of the address. */
6700 else if (optimize && is_local)
6701 kind = TLS_MODEL_LOCAL_DYNAMIC;
6702 else
6703 kind = TLS_MODEL_GLOBAL_DYNAMIC;
6704 if (kind < flag_tls_default)
6705 kind = flag_tls_default;
6706
6707 return kind;
6708 }
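
/* Rough summary by example, ignoring the flag_tls_default clamp at the
   end: for

       __thread int t;

   a build not targeting a shared library (!flag_shlib) picks local-exec
   when T binds locally and initial-exec otherwise; a shared-library
   build picks local-dynamic for a locally bound T when optimizing and
   global-dynamic in the remaining cases.  */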
6709
6710 /* Select a set of attributes for section NAME based on the properties
6711 of DECL and whether or not RELOC indicates that DECL's initializer
6712 might contain runtime relocations.
6713
6714 We make the section read-only and executable for a function decl,
6715 read-only for a const data decl, and writable for a non-const data decl. */
6716
6717 unsigned int
6718 default_section_type_flags (tree decl, const char *name, int reloc)
6719 {
6720 unsigned int flags;
6721
6722 if (decl && TREE_CODE (decl) == FUNCTION_DECL)
6723 flags = SECTION_CODE;
6724 else if (decl)
6725 {
6726 enum section_category category
6727 = categorize_decl_for_section (decl, reloc);
6728 if (decl_readonly_section_1 (category))
6729 flags = 0;
6730 else if (category == SECCAT_DATA_REL_RO
6731 || category == SECCAT_DATA_REL_RO_LOCAL)
6732 flags = SECTION_WRITE | SECTION_RELRO;
6733 else
6734 flags = SECTION_WRITE;
6735 }
6736 else
6737 {
6738 flags = SECTION_WRITE;
6739 if (strcmp (name, ".data.rel.ro") == 0
6740 || strcmp (name, ".data.rel.ro.local") == 0)
6741 flags |= SECTION_RELRO;
6742 }
6743
6744 if (decl && DECL_P (decl) && DECL_COMDAT_GROUP (decl))
6745 flags |= SECTION_LINKONCE;
6746
6747 if (strcmp (name, ".vtable_map_vars") == 0)
6748 flags |= SECTION_LINKONCE;
6749
6750 if (decl && VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
6751 flags |= SECTION_TLS | SECTION_WRITE;
6752
6753 if (strcmp (name, ".bss") == 0
6754 || startswith (name, ".bss.")
6755 || startswith (name, ".gnu.linkonce.b.")
6756 || strcmp (name, ".persistent.bss") == 0
6757 || strcmp (name, ".sbss") == 0
6758 || startswith (name, ".sbss.")
6759 || startswith (name, ".gnu.linkonce.sb."))
6760 flags |= SECTION_BSS;
6761
6762 if (strcmp (name, ".tdata") == 0
6763 || startswith (name, ".tdata.")
6764 || startswith (name, ".gnu.linkonce.td."))
6765 flags |= SECTION_TLS;
6766
6767 if (strcmp (name, ".tbss") == 0
6768 || startswith (name, ".tbss.")
6769 || startswith (name, ".gnu.linkonce.tb."))
6770 flags |= SECTION_TLS | SECTION_BSS;
6771
6772 if (strcmp (name, ".noinit") == 0)
6773 flags |= SECTION_WRITE | SECTION_BSS | SECTION_NOTYPE;
6774
6775 if (strcmp (name, ".persistent") == 0)
6776 flags |= SECTION_WRITE | SECTION_NOTYPE;
6777
6778 /* Various sections have special ELF types that the assembler will
6779 assign by default based on the name. They are neither SHT_PROGBITS
6780 nor SHT_NOBITS, so when changing sections we don't want to print a
6781 section type (@progbits or @nobits). Rather than duplicating the
6782 assembler's knowledge of what those special name patterns are, just
6783 let the assembler choose the type if we don't know a specific
6784 reason to set it to something other than the default. SHT_PROGBITS
6785 is the default for sections whose name is not specially known to
6786 the assembler, so it does no harm to leave the choice to the
6787 assembler when @progbits is the best thing we know to use. If
6788 someone is silly enough to emit code or TLS variables to one of
6789 these sections, then don't handle them specially.
6790
6791 default_elf_asm_named_section (below) handles the BSS, TLS, ENTSIZE, and
6792 LINKONCE cases when NOTYPE is not set, so leave those to its logic. */
6793 if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE))
6794 && !(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)))
6795 flags |= SECTION_NOTYPE;
6796
6797 return flags;
6798 }
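
/* A few informal data points: a FUNCTION_DECL gets SECTION_CODE; a
   read-only constant placed in a named section gets flags 0; a name
   like ".tbss.foo" gets SECTION_TLS | SECTION_BSS; and a section whose
   name matches none of the special patterns and needs none of the
   BSS/TLS/code handling additionally gets SECTION_NOTYPE, so the
   assembler itself picks the ELF section type.  */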
6799
6800 /* Return true if the target supports some form of global BSS,
6801 either through bss_noswitch_section, or by selecting a BSS
6802 section in TARGET_ASM_SELECT_SECTION. */
6803
6804 bool
6805 have_global_bss_p (void)
6806 {
6807 return bss_noswitch_section || targetm.have_switchable_bss_sections;
6808 }
6809
6810 /* Output assembly to switch to section NAME with attribute FLAGS.
6811 Four variants for common object file formats. */
6812
6813 void
6814 default_no_named_section (const char *name ATTRIBUTE_UNUSED,
6815 unsigned int flags ATTRIBUTE_UNUSED,
6816 tree decl ATTRIBUTE_UNUSED)
6817 {
6818 /* Some object formats don't support named sections at all. The
6819 front-end should already have flagged this as an error. */
6820 gcc_unreachable ();
6821 }
6822
6823 #ifndef TLS_SECTION_ASM_FLAG
6824 #define TLS_SECTION_ASM_FLAG 'T'
6825 #endif
6826
6827 void
6828 default_elf_asm_named_section (const char *name, unsigned int flags,
6829 tree decl)
6830 {
6831 char flagchars[11], *f = flagchars;
6832 unsigned int numeric_value = 0;
6833
6834 /* If we have already declared this section, we can use an
6835 abbreviated form to switch back to it -- unless this section is
6836 part of a COMDAT group or has SHF_GNU_RETAIN or SHF_LINK_ORDER set,
6837 in which case GAS requires the full declaration every time. */
6838 if (!(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6839 && !(flags & (SECTION_RETAIN | SECTION_LINK_ORDER))
6840 && (flags & SECTION_DECLARED))
6841 {
6842 fprintf (asm_out_file, "\t.section\t%s\n", name);
6843 return;
6844 }
6845
6846 /* If we have a machine specific flag, then use the numeric value to pass
6847 this on to GAS. */
6848 if (targetm.asm_out.elf_flags_numeric (flags, &numeric_value))
6849 snprintf (f, sizeof (flagchars), "0x%08x", numeric_value);
6850 else
6851 {
6852 if (!(flags & SECTION_DEBUG))
6853 *f++ = 'a';
6854 #if HAVE_GAS_SECTION_EXCLUDE
6855 if (flags & SECTION_EXCLUDE)
6856 *f++ = 'e';
6857 #endif
6858 if (flags & SECTION_WRITE)
6859 *f++ = 'w';
6860 if (flags & SECTION_CODE)
6861 *f++ = 'x';
6862 if (flags & SECTION_SMALL)
6863 *f++ = 's';
6864 if (flags & SECTION_MERGE)
6865 *f++ = 'M';
6866 if (flags & SECTION_STRINGS)
6867 *f++ = 'S';
6868 if (flags & SECTION_TLS)
6869 *f++ = TLS_SECTION_ASM_FLAG;
6870 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6871 *f++ = 'G';
6872 if (flags & SECTION_RETAIN)
6873 *f++ = 'R';
6874 if (flags & SECTION_LINK_ORDER)
6875 *f++ = 'o';
6876 #ifdef MACH_DEP_SECTION_ASM_FLAG
6877 if (flags & SECTION_MACH_DEP)
6878 *f++ = MACH_DEP_SECTION_ASM_FLAG;
6879 #endif
6880 *f = '\0';
6881 }
6882
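  /* FLAGCHARS now holds the GAS flag string; e.g. a writable COMDAT data
     section typically comes out as
	.section .data.foo,"awG",@progbits,foo,comdat
     (illustrative -- the exact directive depends on the flags and target).  */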
6883 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
6884
6885 /* default_section_type_flags (above) knows which flags need special
6886 handling here, and sets NOTYPE when none of these apply so that the
6887 assembler's logic for default types can apply to user-chosen
6888 section names. */
6889 if (!(flags & SECTION_NOTYPE))
6890 {
6891 const char *type;
6892 const char *format;
6893
6894 if (flags & SECTION_BSS)
6895 type = "nobits";
6896 else
6897 type = "progbits";
6898
6899 format = ",@%s";
6900 /* On platforms that use "@" as the assembly comment character,
6901 use "%" instead. */
6902 if (strcmp (ASM_COMMENT_START, "@") == 0)
6903 format = ",%%%s";
6904 fprintf (asm_out_file, format, type);
6905
6906 if (flags & SECTION_ENTSIZE)
6907 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
6908 if (flags & SECTION_LINK_ORDER)
6909 {
6910 tree id = DECL_ASSEMBLER_NAME (decl);
6911 ultimate_transparent_alias_target (&id);
6912 const char *name = IDENTIFIER_POINTER (id);
6913 name = targetm.strip_name_encoding (name);
6914 fprintf (asm_out_file, ",%s", name);
6915 }
6916 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6917 {
6918 if (TREE_CODE (decl) == IDENTIFIER_NODE)
6919 fprintf (asm_out_file, ",%s,comdat", IDENTIFIER_POINTER (decl));
6920 else
6921 fprintf (asm_out_file, ",%s,comdat",
6922 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)));
6923 }
6924 }
6925
6926 putc ('\n', asm_out_file);
6927 }
6928
6929 void
6930 default_coff_asm_named_section (const char *name, unsigned int flags,
6931 tree decl ATTRIBUTE_UNUSED)
6932 {
6933 char flagchars[8], *f = flagchars;
6934
6935 if (flags & SECTION_WRITE)
6936 *f++ = 'w';
6937 if (flags & SECTION_CODE)
6938 *f++ = 'x';
6939 *f = '\0';
6940
6941 fprintf (asm_out_file, "\t.section\t%s,\"%s\"\n", name, flagchars);
6942 }
6943
6944 void
6945 default_pe_asm_named_section (const char *name, unsigned int flags,
6946 tree decl)
6947 {
6948 default_coff_asm_named_section (name, flags, decl);
6949
6950 if (flags & SECTION_LINKONCE)
6951 {
6952 /* Functions may have been compiled at various levels of
6953 optimization so we can't use `same_size' here.
6954 Instead, have the linker pick one. */
6955 fprintf (asm_out_file, "\t.linkonce %s\n",
6956 (flags & SECTION_CODE ? "discard" : "same_size"));
6957 }
6958 }
6959
6960 /* The lame default section selector. */
6961
6962 section *
6963 default_select_section (tree decl, int reloc,
6964 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
6965 {
6966 if (DECL_P (decl))
6967 {
6968 if (decl_readonly_section (decl, reloc))
6969 return readonly_data_section;
6970 }
6971 else if (TREE_CODE (decl) == CONSTRUCTOR)
6972 {
6973 if (! ((flag_pic && reloc)
6974 || !TREE_READONLY (decl)
6975 || TREE_SIDE_EFFECTS (decl)
6976 || !TREE_CONSTANT (decl)))
6977 return readonly_data_section;
6978 }
6979 else if (TREE_CODE (decl) == STRING_CST)
6980 return readonly_data_section;
6981 else if (! (flag_pic && reloc))
6982 return readonly_data_section;
6983
6984 return data_section;
6985 }
6986
6987 enum section_category
6988 categorize_decl_for_section (const_tree decl, int reloc)
6989 {
6990 enum section_category ret;
6991
6992 if (TREE_CODE (decl) == FUNCTION_DECL)
6993 return SECCAT_TEXT;
6994 else if (TREE_CODE (decl) == STRING_CST)
6995 {
6996 if ((flag_sanitize & SANITIZE_ADDRESS)
6997 && asan_protect_global (CONST_CAST_TREE (decl)))
6998 /* or !flag_merge_constants */
6999 return SECCAT_RODATA;
7000 else
7001 return SECCAT_RODATA_MERGE_STR;
7002 }
7003 else if (VAR_P (decl))
7004 {
7005 tree d = CONST_CAST_TREE (decl);
7006 if (bss_initializer_p (decl))
7007 ret = SECCAT_BSS;
7008 else if (! TREE_READONLY (decl)
7009 || TREE_SIDE_EFFECTS (decl)
7010 || (DECL_INITIAL (decl)
7011 && ! TREE_CONSTANT (DECL_INITIAL (decl))))
7012 {
7013 /* Here the reloc_rw_mask is not testing whether the section should
7014 be read-only or not, but whether the dynamic link will have to
7015 do something. If so, we wish to segregate the data in order to
7016 minimize cache misses inside the dynamic linker. */
7017 if (reloc & targetm.asm_out.reloc_rw_mask ())
7018 ret = reloc == 1 ? SECCAT_DATA_REL_LOCAL : SECCAT_DATA_REL;
7019 else
7020 ret = SECCAT_DATA;
7021 }
7022 else if (reloc & targetm.asm_out.reloc_rw_mask ())
7023 ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
7024 else if (reloc || flag_merge_constants < 2
7025 || ((flag_sanitize & SANITIZE_ADDRESS)
7026 /* PR 81697: for architectures that use section anchors we
7027 need to ignore DECL_RTL_SET_P (decl) for string constants
7028 inside this asan_protect_global call because otherwise
7029 we'll wrongly put them into SECCAT_RODATA_MERGE_CONST
7030 section, set DECL_RTL (decl) later on and add DECL to
7031 protected globals via successive asan_protect_global
7032 calls. In this scenario we'll end up with wrong
7033 alignment of these strings at runtime and possible ASan
7034 false positives. */
7035 && asan_protect_global (d, use_object_blocks_p ()
7036 && use_blocks_for_decl_p (d))))
7037 /* C and C++ don't allow different variables to share the same
7038 location. -fmerge-all-constants allows even that (at the
7039 expense of not conforming). */
7040 ret = SECCAT_RODATA;
7041 else if (DECL_INITIAL (decl)
7042 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST)
7043 ret = SECCAT_RODATA_MERGE_STR_INIT;
7044 else
7045 ret = SECCAT_RODATA_MERGE_CONST;
7046 }
7047 else if (TREE_CODE (decl) == CONSTRUCTOR)
7048 {
7049 if ((reloc & targetm.asm_out.reloc_rw_mask ())
7050 || TREE_SIDE_EFFECTS (decl)
7051 || ! TREE_CONSTANT (decl))
7052 ret = SECCAT_DATA;
7053 else
7054 ret = SECCAT_RODATA;
7055 }
7056 else
7057 ret = SECCAT_RODATA;
7058
7059 /* There are no read-only thread-local sections. */
7060 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
7061 {
7062 /* Note that this would be *just* SECCAT_BSS, except that there's
7063 no concept of a read-only thread-local-data section. */
7064 if (ret == SECCAT_BSS
7065 || DECL_INITIAL (decl) == NULL
7066 || (flag_zero_initialized_in_bss
7067 && initializer_zerop (DECL_INITIAL (decl))))
7068 ret = SECCAT_TBSS;
7069 else
7070 ret = SECCAT_TDATA;
7071 }
7072
7073 /* If the target uses small data sections, select it. */
7074 else if (targetm.in_small_data_p (decl))
7075 {
7076 if (ret == SECCAT_BSS)
7077 ret = SECCAT_SBSS;
7078 else if (targetm.have_srodata_section && ret == SECCAT_RODATA)
7079 ret = SECCAT_SRODATA;
7080 else
7081 ret = SECCAT_SDATA;
7082 }
7083
7084 return ret;
7085 }
7086
7087 static bool
7088 decl_readonly_section_1 (enum section_category category)
7089 {
7090 switch (category)
7091 {
7092 case SECCAT_RODATA:
7093 case SECCAT_RODATA_MERGE_STR:
7094 case SECCAT_RODATA_MERGE_STR_INIT:
7095 case SECCAT_RODATA_MERGE_CONST:
7096 case SECCAT_SRODATA:
7097 return true;
7098 default:
7099 return false;
7100 }
7101 }
7102
7103 bool
7104 decl_readonly_section (const_tree decl, int reloc)
7105 {
7106 return decl_readonly_section_1 (categorize_decl_for_section (decl, reloc));
7107 }
7108
7109 /* Select a section based on the above categorization. */
7110
7111 section *
7112 default_elf_select_section (tree decl, int reloc,
7113 unsigned HOST_WIDE_INT align)
7114 {
7115 const char *sname;
7116
7117 switch (categorize_decl_for_section (decl, reloc))
7118 {
7119 case SECCAT_TEXT:
7120 /* We're not supposed to be called on FUNCTION_DECLs. */
7121 gcc_unreachable ();
7122 case SECCAT_RODATA:
7123 return readonly_data_section;
7124 case SECCAT_RODATA_MERGE_STR:
7125 return mergeable_string_section (decl, align, 0);
7126 case SECCAT_RODATA_MERGE_STR_INIT:
7127 return mergeable_string_section (DECL_INITIAL (decl), align, 0);
7128 case SECCAT_RODATA_MERGE_CONST:
7129 return mergeable_constant_section (DECL_MODE (decl), align, 0);
7130 case SECCAT_SRODATA:
7131 sname = ".sdata2";
7132 break;
7133 case SECCAT_DATA:
7134 if (DECL_P (decl) && DECL_PERSISTENT_P (decl))
7135 {
7136 sname = ".persistent";
7137 break;
7138 }
7139 return data_section;
7140 case SECCAT_DATA_REL:
7141 sname = ".data.rel";
7142 break;
7143 case SECCAT_DATA_REL_LOCAL:
7144 sname = ".data.rel.local";
7145 break;
7146 case SECCAT_DATA_REL_RO:
7147 sname = ".data.rel.ro";
7148 break;
7149 case SECCAT_DATA_REL_RO_LOCAL:
7150 sname = ".data.rel.ro.local";
7151 break;
7152 case SECCAT_SDATA:
7153 sname = ".sdata";
7154 break;
7155 case SECCAT_TDATA:
7156 sname = ".tdata";
7157 break;
7158 case SECCAT_BSS:
7159 if (DECL_P (decl) && DECL_NOINIT_P (decl))
7160 {
7161 sname = ".noinit";
7162 break;
7163 }
7164 if (bss_section)
7165 return bss_section;
7166 sname = ".bss";
7167 break;
7168 case SECCAT_SBSS:
7169 sname = ".sbss";
7170 break;
7171 case SECCAT_TBSS:
7172 sname = ".tbss";
7173 break;
7174 default:
7175 gcc_unreachable ();
7176 }
7177
7178 return get_named_section (decl, sname, reloc);
7179 }
7180
7181 /* Construct a unique section name based on the decl name and the
7182 categorization performed above. */
7183
7184 void
7185 default_unique_section (tree decl, int reloc)
7186 {
7187 /* We only need to use .gnu.linkonce if we don't have COMDAT groups. */
7188 bool one_only = DECL_ONE_ONLY (decl) && !HAVE_COMDAT_GROUP;
7189 const char *prefix, *name, *linkonce;
7190 char *string;
7191 tree id;
7192
7193 switch (categorize_decl_for_section (decl, reloc))
7194 {
7195 case SECCAT_TEXT:
7196 prefix = one_only ? ".t" : ".text";
7197 break;
7198 case SECCAT_RODATA:
7199 case SECCAT_RODATA_MERGE_STR:
7200 case SECCAT_RODATA_MERGE_STR_INIT:
7201 case SECCAT_RODATA_MERGE_CONST:
7202 prefix = one_only ? ".r" : ".rodata";
7203 break;
7204 case SECCAT_SRODATA:
7205 prefix = one_only ? ".s2" : ".sdata2";
7206 break;
7207 case SECCAT_DATA:
7208 prefix = one_only ? ".d" : ".data";
7209 if (DECL_P (decl) && DECL_PERSISTENT_P (decl))
7210 {
7211 prefix = one_only ? ".p" : ".persistent";
7212 break;
7213 }
7214 break;
7215 case SECCAT_DATA_REL:
7216 prefix = one_only ? ".d.rel" : ".data.rel";
7217 break;
7218 case SECCAT_DATA_REL_LOCAL:
7219 prefix = one_only ? ".d.rel.local" : ".data.rel.local";
7220 break;
7221 case SECCAT_DATA_REL_RO:
7222 prefix = one_only ? ".d.rel.ro" : ".data.rel.ro";
7223 break;
7224 case SECCAT_DATA_REL_RO_LOCAL:
7225 prefix = one_only ? ".d.rel.ro.local" : ".data.rel.ro.local";
7226 break;
7227 case SECCAT_SDATA:
7228 prefix = one_only ? ".s" : ".sdata";
7229 break;
7230 case SECCAT_BSS:
7231 if (DECL_P (decl) && DECL_NOINIT_P (decl))
7232 {
7233 prefix = one_only ? ".n" : ".noinit";
7234 break;
7235 }
7236 prefix = one_only ? ".b" : ".bss";
7237 break;
7238 case SECCAT_SBSS:
7239 prefix = one_only ? ".sb" : ".sbss";
7240 break;
7241 case SECCAT_TDATA:
7242 prefix = one_only ? ".td" : ".tdata";
7243 break;
7244 case SECCAT_TBSS:
7245 prefix = one_only ? ".tb" : ".tbss";
7246 break;
7247 default:
7248 gcc_unreachable ();
7249 }
7250
7251 id = DECL_ASSEMBLER_NAME (decl);
7252 ultimate_transparent_alias_target (&id);
7253 name = IDENTIFIER_POINTER (id);
7254 name = targetm.strip_name_encoding (name);
7255
7256 /* If we're using one_only, then there needs to be a .gnu.linkonce
7257 prefix to the section name. */
7258 linkonce = one_only ? ".gnu.linkonce" : "";
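  /* E.g. a one-only function "foo" without COMDAT group support gets
     ".gnu.linkonce.t.foo", while ordinary data "bar" gets ".data.bar".  */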
7259
7260 string = ACONCAT ((linkonce, prefix, ".", name, NULL));
7261
7262 set_decl_section_name (decl, string);
7263 }
7264
7265 /* Subroutine of compute_reloc_for_rtx for leaf rtxes. */
7266
7267 static int
7268 compute_reloc_for_rtx_1 (const_rtx x)
7269 {
7270 switch (GET_CODE (x))
7271 {
7272 case SYMBOL_REF:
7273 return SYMBOL_REF_LOCAL_P (x) ? 1 : 2;
7274 case LABEL_REF:
7275 return 1;
7276 default:
7277 return 0;
7278 }
7279 }
7280
7281 /* Like compute_reloc_for_constant, except for an RTX. The return value
7282 is a mask for which bit 1 indicates a global relocation, and bit 0
7283 indicates a local relocation. Used by default_select_rtx_section
7284 and default_elf_select_rtx_section. */
7285
7286 static int
7287 compute_reloc_for_rtx (const_rtx x)
7288 {
7289 switch (GET_CODE (x))
7290 {
7291 case SYMBOL_REF:
7292 case LABEL_REF:
7293 return compute_reloc_for_rtx_1 (x);
7294
7295 case CONST:
7296 {
7297 int reloc = 0;
7298 subrtx_iterator::array_type array;
7299 FOR_EACH_SUBRTX (iter, array, x, ALL)
7300 reloc |= compute_reloc_for_rtx_1 (*iter);
7301 return reloc;
7302 }
7303
7304 default:
7305 return 0;
7306 }
7307 }
7308
7309 section *
7310 default_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
7311 rtx x,
7312 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
7313 {
7314 if (compute_reloc_for_rtx (x) & targetm.asm_out.reloc_rw_mask ())
7315 return data_section;
7316 else
7317 return readonly_data_section;
7318 }
7319
7320 section *
7321 default_elf_select_rtx_section (machine_mode mode, rtx x,
7322 unsigned HOST_WIDE_INT align)
7323 {
7324 int reloc = compute_reloc_for_rtx (x);
7325
7326 /* ??? Handle small data here somehow. */
7327
7328 if (reloc & targetm.asm_out.reloc_rw_mask ())
7329 {
7330 if (reloc == 1)
7331 return get_named_section (NULL, ".data.rel.ro.local", 1);
7332 else
7333 return get_named_section (NULL, ".data.rel.ro", 3);
7334 }
7335
7336 return mergeable_constant_section (mode, align, 0);
7337 }
7338
7339 /* Set the generally applicable flags on the SYMBOL_REF for EXP. */
7340
7341 void
7342 default_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
7343 {
7344 rtx symbol;
7345 int flags;
7346
7347 /* Careful not to prod global register variables. */
7348 if (!MEM_P (rtl))
7349 return;
7350 symbol = XEXP (rtl, 0);
7351 if (GET_CODE (symbol) != SYMBOL_REF)
7352 return;
7353
7354 flags = SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_HAS_BLOCK_INFO;
7355 if (TREE_CODE (decl) == FUNCTION_DECL)
7356 flags |= SYMBOL_FLAG_FUNCTION;
7357 if (targetm.binds_local_p (decl))
7358 flags |= SYMBOL_FLAG_LOCAL;
7359 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
7360 flags |= DECL_TLS_MODEL (decl) << SYMBOL_FLAG_TLS_SHIFT;
7361 else if (targetm.in_small_data_p (decl))
7362 flags |= SYMBOL_FLAG_SMALL;
7363 /* ??? Why is DECL_EXTERNAL ever set for non-PUBLIC names? Without
7364 being PUBLIC, the thing *must* be defined in this translation unit.
7365 Prevent this buglet from being propagated into rtl code as well. */
7366 if (DECL_P (decl) && DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
7367 flags |= SYMBOL_FLAG_EXTERNAL;
7368
7369 SYMBOL_REF_FLAGS (symbol) = flags;
7370 }
7371
7372 /* By default, we do nothing for encode_section_info, so we need not
7373 do anything but discard the '*' marker. */
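/* E.g. an encoded name "*foo" is returned as "foo"; a name without the
   leading '*' marker is returned unchanged.  */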
7374
7375 const char *
7376 default_strip_name_encoding (const char *str)
7377 {
7378 return str + (*str == '*');
7379 }
7380
7381 #ifdef ASM_OUTPUT_DEF
7382 /* The default implementation of TARGET_ASM_OUTPUT_ANCHOR. Define the
7383 anchor relative to ".", the current section position. */
7384
7385 void
7386 default_asm_output_anchor (rtx symbol)
7387 {
7388 char buffer[100];
7389
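  /* This defines the anchor as an offset from the current position; e.g. a
     block offset of 16 produces the equivalent of ".set .LANCHOR0, . + 16"
     on typical ELF targets (illustrative).  */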
7390 sprintf (buffer, "*. + " HOST_WIDE_INT_PRINT_DEC,
7391 SYMBOL_REF_BLOCK_OFFSET (symbol));
7392 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
7393 }
7394 #endif
7395
7396 /* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P. */
7397
7398 bool
7399 default_use_anchors_for_symbol_p (const_rtx symbol)
7400 {
7401 tree decl;
7402 section *sect = SYMBOL_REF_BLOCK (symbol)->sect;
7403
7404 /* This function should only be called with non-zero SYMBOL_REF_BLOCK;
7405 furthermore, get_block_for_section should not create object blocks
7406 for mergeable sections. */
7407 gcc_checking_assert (sect && !(sect->common.flags & SECTION_MERGE));
7408
7409 /* Don't use anchors for small data sections. The small data register
7410 acts as an anchor for such sections. */
7411 if (sect->common.flags & SECTION_SMALL)
7412 return false;
7413
7414 decl = SYMBOL_REF_DECL (symbol);
7415 if (decl && DECL_P (decl))
7416 {
7417 /* Don't use section anchors for decls that might be defined or
7418 usurped by other modules. */
7419 if (TREE_PUBLIC (decl) && !decl_binds_to_current_def_p (decl))
7420 return false;
7421
7422 /* Don't use section anchors for decls that will be placed in a
7423 small data section. */
7424 /* ??? Ideally, this check would be redundant with the SECTION_SMALL
7425 one above. The problem is that we only use SECTION_SMALL for
7426 sections that should be marked as small in the section directive. */
7427 if (targetm.in_small_data_p (decl))
7428 return false;
7429
7430 /* Don't use section anchors for decls that won't fit inside a single
7431 anchor range to reduce the amount of instructions required to refer
7432 to the entire declaration. */
7433 if (DECL_SIZE_UNIT (decl) == NULL_TREE
7434 || !tree_fits_uhwi_p (DECL_SIZE_UNIT (decl))
7435 || (tree_to_uhwi (DECL_SIZE_UNIT (decl))
7436 >= (unsigned HOST_WIDE_INT) targetm.max_anchor_offset))
7437 return false;
7438
7439 }
7440 return true;
7441 }
7442
7443 /* Return true when RESOLUTION indicates that the symbol will be bound to
7444 the definition provided by the current .o file. */
7445
7446 static bool
7447 resolution_to_local_definition_p (enum ld_plugin_symbol_resolution resolution)
7448 {
7449 return (resolution == LDPR_PREVAILING_DEF
7450 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7451 || resolution == LDPR_PREVAILING_DEF_IRONLY);
7452 }
7453
7454 /* Return true when RESOLUTION indicates that the symbol will be bound
7455 locally within the current executable or DSO. */
7456
7457 static bool
7458 resolution_local_p (enum ld_plugin_symbol_resolution resolution)
7459 {
7460 return (resolution == LDPR_PREVAILING_DEF
7461 || resolution == LDPR_PREVAILING_DEF_IRONLY
7462 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7463 || resolution == LDPR_PREEMPTED_REG
7464 || resolution == LDPR_PREEMPTED_IR
7465 || resolution == LDPR_RESOLVED_IR
7466 || resolution == LDPR_RESOLVED_EXEC);
7467 }
7468
7469 /* If COMMON_LOCAL_P is true, the linker can guarantee that an
7470 uninitialized common symbol in the executable will still be defined
7471 (through COPY relocation) in the executable. */
7472
7473 bool
7474 default_binds_local_p_3 (const_tree exp, bool shlib, bool weak_dominate,
7475 bool extern_protected_data, bool common_local_p)
7476 {
7477 /* A non-decl is an entry in the constant pool. */
7478 if (!DECL_P (exp))
7479 return true;
7480
7481 /* Weakrefs may not bind locally, even though the weakref itself is always
7482 static and therefore local. Similarly, the resolver for ifunc functions
7483 might resolve to a non-local function.
7484 FIXME: We can resolve the weakref case more carefully by looking at the
7485 weakref alias. */
7486 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (exp))
7487 || (!targetm.ifunc_ref_local_ok ()
7488 && TREE_CODE (exp) == FUNCTION_DECL
7489 && cgraph_node::get (exp)
7490 && cgraph_node::get (exp)->ifunc_resolver))
7491 return false;
7492
7493 /* Static variables are always local. */
7494 if (! TREE_PUBLIC (exp))
7495 return true;
7496
7497 /* With a resolution file in hand, take a look at the resolutions.
7498 We can't just return true for locally resolved symbols,
7499 because dynamic linking might overwrite symbols
7500 in shared libraries. */
7501 bool resolved_locally = false;
7502
7503 bool uninited_common = (DECL_COMMON (exp)
7504 && (DECL_INITIAL (exp) == NULL
7505 || (!in_lto_p
7506 && DECL_INITIAL (exp) == error_mark_node)));
7507
7508 /* A non-external variable is defined locally only if it isn't an
7509 uninitialized COMMON variable or common_local_p is true. */
7510 bool defined_locally = (!DECL_EXTERNAL (exp)
7511 && (!uninited_common || common_local_p));
7512 if (symtab_node *node = symtab_node::get (exp))
7513 {
7514 if (node->in_other_partition)
7515 defined_locally = true;
7516 if (node->can_be_discarded_p ())
7517 ;
7518 else if (resolution_to_local_definition_p (node->resolution))
7519 defined_locally = resolved_locally = true;
7520 else if (resolution_local_p (node->resolution))
7521 resolved_locally = true;
7522 }
7523 if (defined_locally && weak_dominate && !shlib)
7524 resolved_locally = true;
7525
7526 /* Undefined weak symbols are never defined locally. */
7527 if (DECL_WEAK (exp) && !defined_locally)
7528 return false;
7529
7530 /* A symbol is local if the user has said explicitly that it will be,
7531 or if we have a definition for the symbol. We cannot infer visibility
7532 for undefined symbols. */
7533 if (DECL_VISIBILITY (exp) != VISIBILITY_DEFAULT
7534 && (TREE_CODE (exp) == FUNCTION_DECL
7535 || !extern_protected_data
7536 || DECL_VISIBILITY (exp) != VISIBILITY_PROTECTED)
7537 && (DECL_VISIBILITY_SPECIFIED (exp) || defined_locally))
7538 return true;
7539
7540 /* If PIC, then assume that any global name can be overridden by
7541 symbols resolved from other modules. */
7542 if (shlib)
7543 return false;
7544
7545 /* Variables defined outside this object might not be local. */
7546 if (DECL_EXTERNAL (exp) && !resolved_locally)
7547 return false;
7548
7549 /* Non-dominant weak symbols are not defined locally. */
7550 if (DECL_WEAK (exp) && !resolved_locally)
7551 return false;
7552
7553 /* An uninitialized COMMON variable may be unified with symbols
7554 resolved from other modules. */
7555 if (uninited_common && !resolved_locally)
7556 return false;
7557
7558 /* Otherwise we're left with initialized (or non-common) global data
7559 which is of necessity defined locally. */
7560 return true;
7561 }
7562
7563 /* Assume ELF-ish defaults, since that's pretty much the most liberal
7564 wrt cross-module name binding. */
7565
7566 bool
7567 default_binds_local_p (const_tree exp)
7568 {
7569 return default_binds_local_p_3 (exp, flag_shlib != 0, true, false, false);
7570 }
7571
7572 /* Similar to default_binds_local_p, but common symbol may be local and
7573 extern protected data is non-local. */
7574
7575 bool
7576 default_binds_local_p_2 (const_tree exp)
7577 {
7578 return default_binds_local_p_3 (exp, flag_shlib != 0, true, true,
7579 !flag_pic);
7580 }
7581
7582 bool
7583 default_binds_local_p_1 (const_tree exp, int shlib)
7584 {
7585 return default_binds_local_p_3 (exp, shlib != 0, false, false, false);
7586 }
7587
7588 /* Return true when references to DECL must bind to the current definition
7589 in the final executable.
7590
7591 The condition is usually equivalent to whether the function binds to the
7592 current module (shared library or executable), that is to binds_local_p.
7593 We use this fact to avoid the need for another target hook and implement
7594 the logic using binds_local_p plus the special cases where
7595 decl_binds_to_current_def_p is stronger than binds_local_p, in particular
7596 weak definitions (which can be overwritten at link time by another
7597 definition from a different object file). When resolution info is
7598 available we simply use the knowledge passed to us by the linker plugin. */
7599 bool
7600 decl_binds_to_current_def_p (const_tree decl)
7601 {
7602 gcc_assert (DECL_P (decl));
7603 if (!targetm.binds_local_p (decl))
7604 return false;
7605 if (!TREE_PUBLIC (decl))
7606 return true;
7607
7608 /* When resolution is available, just use it. */
7609 if (symtab_node *node = symtab_node::get (decl))
7610 {
7611 if (node->resolution != LDPR_UNKNOWN
7612 && !node->can_be_discarded_p ())
7613 return resolution_to_local_definition_p (node->resolution);
7614 }
7615
7616 /* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks
7617 bind locally but can still be overwritten), DECL_COMMON (can be merged
7618 with a non-common definition somewhere in the same module) or
7619 DECL_EXTERNAL.
7620 This relies on the fact that binds_local_p behaves as decl_replaceable_p
7621 for all other declaration types. */
7622 if (DECL_WEAK (decl))
7623 return false;
7624 if (DECL_COMMON (decl)
7625 && (DECL_INITIAL (decl) == NULL
7626 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
7627 return false;
7628 if (DECL_EXTERNAL (decl))
7629 return false;
7630 return true;
7631 }
7632
7633 /* A replaceable function or variable is one which may be replaced
7634 at link-time with an entirely different definition, provided that the
7635 replacement has the same type. For example, functions declared
7636 with __attribute__((weak)) on most systems are replaceable.
7637 If SEMANTIC_INTERPOSITION_P is false allow interposition only on
7638 symbols explicitly declared weak.
7639
7640 COMDAT functions are not replaceable, since all definitions of the
7641 function must be equivalent. It is important that COMDAT functions
7642 not be treated as replaceable so that use of C++ template
7643 instantiations is not penalized. */
7644
7645 bool
7646 decl_replaceable_p (tree decl, bool semantic_interposition_p)
7647 {
7648 gcc_assert (DECL_P (decl));
7649 if (!TREE_PUBLIC (decl) || DECL_COMDAT (decl))
7650 return false;
7651 if (!semantic_interposition_p
7652 && !DECL_WEAK (decl))
7653 return false;
7654 return !decl_binds_to_current_def_p (decl);
7655 }
7656
7657 /* Default function to output code that will globalize a label. A
7658 target must define GLOBAL_ASM_OP or provide its own function to
7659 globalize a label. */
7660 #ifdef GLOBAL_ASM_OP
7661 void
7662 default_globalize_label (FILE * stream, const char *name)
7663 {
7664 fputs (GLOBAL_ASM_OP, stream);
7665 assemble_name (stream, name);
7666 putc ('\n', stream);
7667 }
7668 #endif /* GLOBAL_ASM_OP */
7669
7670 /* Default function to output code that will globalize a declaration. */
7671 void
7672 default_globalize_decl_name (FILE * stream, tree decl)
7673 {
7674 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
7675 targetm.asm_out.globalize_label (stream, name);
7676 }
7677
7678 /* Default function to output a label for unwind information. The
7679 default is to do nothing. A target that needs nonlocal labels for
7680 unwind information must provide its own function to do this. */
7681 void
7682 default_emit_unwind_label (FILE * stream ATTRIBUTE_UNUSED,
7683 tree decl ATTRIBUTE_UNUSED,
7684 int for_eh ATTRIBUTE_UNUSED,
7685 int empty ATTRIBUTE_UNUSED)
7686 {
7687 }
7688
7689 /* Default function to output a label to divide up the exception table.
7690 The default is to do nothing. A target that needs/wants to divide
7691 up the table must provide its own function to do this. */
7692 void
7693 default_emit_except_table_label (FILE * stream ATTRIBUTE_UNUSED)
7694 {
7695 }
7696
7697 /* This is how to output an internal numbered label where PREFIX is
7698 the class of label and LABELNO is the number within the class. */
7699
7700 void
7701 default_generate_internal_label (char *buf, const char *prefix,
7702 unsigned long labelno)
7703 {
7704 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
7705 }
7706
7707 /* This is how to output an internal numbered label where PREFIX is
7708 the class of label and LABELNO is the number within the class. */
7709
7710 void
7711 default_internal_label (FILE *stream, const char *prefix,
7712 unsigned long labelno)
7713 {
7714 char *const buf = (char *) alloca (40 + strlen (prefix));
7715 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
7716 ASM_OUTPUT_INTERNAL_LABEL (stream, buf);
7717 }
7718
7719
7720 /* The default implementation of ASM_DECLARE_CONSTANT_NAME. */
7721
7722 void
7723 default_asm_declare_constant_name (FILE *file, const char *name,
7724 const_tree exp ATTRIBUTE_UNUSED,
7725 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
7726 {
7727 assemble_label (file, name);
7728 }
7729
7730 /* This is the default behavior at the beginning of a file. It's
7731 controlled by two other target-hook toggles. */
7732 void
7733 default_file_start (void)
7734 {
7735 if (targetm.asm_file_start_app_off
7736 && !(flag_verbose_asm || flag_debug_asm || flag_dump_rtl_in_asm))
7737 fputs (ASM_APP_OFF, asm_out_file);
7738
7739 if (targetm.asm_file_start_file_directive)
7740 {
7741 /* LTO-produced units have no meaningful main_input_filename. */
7742 if (in_lto_p)
7743 output_file_directive (asm_out_file, "<artificial>");
7744 else
7745 output_file_directive (asm_out_file, main_input_filename);
7746 }
7747 }
7748
7749 /* This is a generic routine suitable for use as TARGET_ASM_FILE_END
7750 which emits a special section directive used to indicate whether or
7751 not this object file needs an executable stack. This is primarily
7752 a GNU extension to ELF but could be used on other targets. */
7753
7754 int trampolines_created;
7755
7756 void
7757 file_end_indicate_exec_stack (void)
7758 {
7759 unsigned int flags = SECTION_DEBUG;
7760 if (trampolines_created)
7761 flags |= SECTION_CODE;
7762
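  /* On typical ELF targets this produces something along the lines of
	.section .note.GNU-stack,"",@progbits
     with the "x" flag added when trampolines were created (illustrative).  */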
7763 switch_to_section (get_section (".note.GNU-stack", flags, NULL));
7764 }
7765
7766 /* Emit a special section directive to indicate that this object file
7767 was compiled with -fsplit-stack. This is used to let the linker
7768 detect calls between split-stack code and non-split-stack code, so
7769 that it can modify the split-stack code to allocate a sufficiently
7770 large stack. We emit another special section if there are any
7771 functions in this file which have the no_split_stack attribute, to
7772 prevent the linker from warning about being unable to convert the
7773 functions if they call non-split-stack code. */
7774
7775 void
7776 file_end_indicate_split_stack (void)
7777 {
7778 if (flag_split_stack)
7779 {
7780 switch_to_section (get_section (".note.GNU-split-stack", SECTION_DEBUG,
7781 NULL));
7782 if (saw_no_split_stack)
7783 switch_to_section (get_section (".note.GNU-no-split-stack",
7784 SECTION_DEBUG, NULL));
7785 }
7786 }
7787
7788 /* Output DIRECTIVE (a C string) followed by a newline. This is used as
7789 a get_unnamed_section callback. */
7790
7791 void
7792 output_section_asm_op (const char *directive)
7793 {
7794 fprintf (asm_out_file, "%s\n", directive);
7795 }
7796
7797 /* Emit assembly code to switch to section NEW_SECTION. Do nothing if
7798 the current section is NEW_SECTION. */
7799
7800 void
7801 switch_to_section (section *new_section, tree decl)
7802 {
7803 bool retain_p;
7804 if ((new_section->common.flags & SECTION_NAMED)
7805 && decl != nullptr
7806 && DECL_P (decl)
7807 && ((retain_p = !!lookup_attribute ("retain",
7808 DECL_ATTRIBUTES (decl)))
7809 != !!(new_section->common.flags & SECTION_RETAIN)))
7810 {
7811 /* If the SECTION_RETAIN bit doesn't match, switch to a new
7812 section. */
7813 tree used_decl, no_used_decl;
7814
7815 if (retain_p)
7816 {
7817 new_section->common.flags |= SECTION_RETAIN;
7818 used_decl = decl;
7819 no_used_decl = new_section->named.decl;
7820 }
7821 else
7822 {
7823 new_section->common.flags &= ~(SECTION_RETAIN
7824 | SECTION_DECLARED);
7825 used_decl = new_section->named.decl;
7826 no_used_decl = decl;
7827 }
7828 if (no_used_decl != used_decl)
7829 {
7830 warning (OPT_Wattributes,
7831 "%+qD without %<retain%> attribute and %qD with "
7832 "%<retain%> attribute are placed in a section with "
7833 "the same name", no_used_decl, used_decl);
7834 inform (DECL_SOURCE_LOCATION (used_decl),
7835 "%qD was declared here", used_decl);
7836 }
7837 }
7838 else if (in_section == new_section)
7839 return;
7840
7841 in_section = new_section;
7842
7843 switch (SECTION_STYLE (new_section))
7844 {
7845 case SECTION_NAMED:
7846 targetm.asm_out.named_section (new_section->named.name,
7847 new_section->named.common.flags,
7848 new_section->named.decl);
7849 break;
7850
7851 case SECTION_UNNAMED:
7852 new_section->unnamed.callback (new_section->unnamed.data);
7853 break;
7854
7855 case SECTION_NOSWITCH:
7856 gcc_unreachable ();
7857 break;
7858 }
7859
7860 new_section->common.flags |= SECTION_DECLARED;
7861 }
7862
7863 /* If block symbol SYMBOL has not yet been assigned an offset, place
7864 it at the end of its block. */
7865
7866 void
7867 place_block_symbol (rtx symbol)
7868 {
7869 unsigned HOST_WIDE_INT size, mask, offset;
7870 class constant_descriptor_rtx *desc;
7871 unsigned int alignment;
7872 struct object_block *block;
7873 tree decl;
7874
7875 gcc_assert (SYMBOL_REF_BLOCK (symbol));
7876 if (SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0)
7877 return;
7878
7879 /* Work out the symbol's size and alignment. */
7880 if (CONSTANT_POOL_ADDRESS_P (symbol))
7881 {
7882 desc = SYMBOL_REF_CONSTANT (symbol);
7883 alignment = desc->align;
7884 size = GET_MODE_SIZE (desc->mode);
7885 }
7886 else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
7887 {
7888 decl = SYMBOL_REF_DECL (symbol);
7889 gcc_checking_assert (DECL_IN_CONSTANT_POOL (decl));
7890 alignment = DECL_ALIGN (decl);
7891 size = get_constant_size (DECL_INITIAL (decl));
7892 if ((flag_sanitize & SANITIZE_ADDRESS)
7893 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
7894 && asan_protect_global (DECL_INITIAL (decl)))
7895 {
7896 size += asan_red_zone_size (size);
7897 alignment = MAX (alignment,
7898 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
7899 }
7900 }
7901 else
7902 {
7903 struct symtab_node *snode;
7904 decl = SYMBOL_REF_DECL (symbol);
7905
7906 snode = symtab_node::get (decl);
7907 if (snode->alias)
7908 {
7909 rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);
7910
7911 gcc_assert (MEM_P (target)
7912 && GET_CODE (XEXP (target, 0)) == SYMBOL_REF
7913 && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (target, 0)));
7914 target = XEXP (target, 0);
7915 place_block_symbol (target);
7916 SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);
7917 return;
7918 }
7919 alignment = get_variable_align (decl);
7920 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
7921 if ((flag_sanitize & SANITIZE_ADDRESS)
7922 && asan_protect_global (decl))
7923 {
7924 size += asan_red_zone_size (size);
7925 alignment = MAX (alignment,
7926 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
7927 }
7928 }
7929
7930 /* Calculate the object's offset from the start of the block. */
7931 block = SYMBOL_REF_BLOCK (symbol);
7932 mask = alignment / BITS_PER_UNIT - 1;
7933 offset = (block->size + mask) & ~mask;
7934 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
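  /* E.g. with block->size == 7 and a 4-byte (32-bit) alignment, MASK is 3
     and the symbol is placed at offset 8.  */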
7935
7936 /* Record the block's new alignment and size. */
7937 block->alignment = MAX (block->alignment, alignment);
7938 block->size = offset + size;
7939
7940 vec_safe_push (block->objects, symbol);
7941 }
7942
7943 /* Return the anchor that should be used to address byte offset OFFSET
7944 from the first object in BLOCK. MODEL is the TLS model used
7945 to access it. */
7946
7947 rtx
7948 get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
7949 enum tls_model model)
7950 {
7951 char label[100];
7952 unsigned int begin, middle, end;
7953 unsigned HOST_WIDE_INT min_offset, max_offset, range, bias, delta;
7954 rtx anchor;
7955
7956 /* Work out the anchor's offset. Use an offset of 0 for the first
7957 anchor so that we don't pessimize the case where we take the address
7958 of a variable at the beginning of the block. This is particularly
7959 useful when a block has only one variable assigned to it.
7960
7961 We try to place anchors RANGE bytes apart, so there can then be
7962 anchors at +/-RANGE, +/-2 * RANGE, and so on, up to the limits of
7963 a ptr_mode offset. With some target settings, the lowest such
7964 anchor might be out of range for the lowest ptr_mode offset;
7965 likewise the highest anchor for the highest offset. Use anchors
7966 at the extreme ends of the ptr_mode range in such cases.
7967
7968 All arithmetic uses unsigned integers in order to avoid
7969 signed overflow. */
7970 max_offset = (unsigned HOST_WIDE_INT) targetm.max_anchor_offset;
7971 min_offset = (unsigned HOST_WIDE_INT) targetm.min_anchor_offset;
7972 range = max_offset - min_offset + 1;
7973 if (range == 0)
7974 offset = 0;
7975 else
7976 {
7977 bias = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (ptr_mode) - 1);
7978 if (offset < 0)
7979 {
7980 delta = -(unsigned HOST_WIDE_INT) offset + max_offset;
7981 delta -= delta % range;
7982 if (delta > bias)
7983 delta = bias;
7984 offset = (HOST_WIDE_INT) (-delta);
7985 }
7986 else
7987 {
7988 delta = (unsigned HOST_WIDE_INT) offset - min_offset;
7989 delta -= delta % range;
7990 if (delta > bias - 1)
7991 delta = bias - 1;
7992 offset = (HOST_WIDE_INT) delta;
7993 }
7994 }
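  /* Worked example (illustrative values): with min_offset -4096 and
     max_offset 4095, RANGE is 8192; a requested OFFSET of 10000 gives
     DELTA = 14096 rounded down to 8192, so the anchor lands at +8192.  */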
7995
7996 /* Do a binary search to see if there's already an anchor we can use.
7997 Set BEGIN to the new anchor's index if not. */
7998 begin = 0;
7999 end = vec_safe_length (block->anchors);
8000 while (begin != end)
8001 {
8002 middle = (end + begin) / 2;
8003 anchor = (*block->anchors)[middle];
8004 if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset)
8005 end = middle;
8006 else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset)
8007 begin = middle + 1;
8008 else if (SYMBOL_REF_TLS_MODEL (anchor) > model)
8009 end = middle;
8010 else if (SYMBOL_REF_TLS_MODEL (anchor) < model)
8011 begin = middle + 1;
8012 else
8013 return anchor;
8014 }
8015
8016 /* Create a new anchor with a unique label. */
8017 ASM_GENERATE_INTERNAL_LABEL (label, "LANCHOR", anchor_labelno++);
8018 anchor = create_block_symbol (ggc_strdup (label), block, offset);
8019 SYMBOL_REF_FLAGS (anchor) |= SYMBOL_FLAG_LOCAL | SYMBOL_FLAG_ANCHOR;
8020 SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT;
8021
8022 /* Insert it at index BEGIN. */
8023 vec_safe_insert (block->anchors, begin, anchor);
8024 return anchor;
8025 }
8026
8027 /* Output the objects in BLOCK. */
8028
8029 static void
8030 output_object_block (struct object_block *block)
8031 {
8032 class constant_descriptor_rtx *desc;
8033 unsigned int i;
8034 HOST_WIDE_INT offset;
8035 tree decl;
8036 rtx symbol;
8037
8038 if (!block->objects)
8039 return;
8040
8041 /* Switch to the section and make sure that the first byte is
8042 suitably aligned. */
8043 /* Special case VTV comdat sections similar to assemble_variable. */
8044 if (SECTION_STYLE (block->sect) == SECTION_NAMED
8045 && block->sect->named.name
8046 && (strcmp (block->sect->named.name, ".vtable_map_vars") == 0))
8047 handle_vtv_comdat_section (block->sect, block->sect->named.decl);
8048 else
8049 switch_to_section (block->sect, SYMBOL_REF_DECL ((*block->objects)[0]));
8050
8051 gcc_checking_assert (!(block->sect->common.flags & SECTION_MERGE));
8052 assemble_align (block->alignment);
8053
8054 /* Define the values of all anchors relative to the current section
8055 position. */
8056 FOR_EACH_VEC_SAFE_ELT (block->anchors, i, symbol)
8057 targetm.asm_out.output_anchor (symbol);
8058
8059 /* Output the objects themselves. */
8060 offset = 0;
8061 FOR_EACH_VEC_ELT (*block->objects, i, symbol)
8062 {
8063 /* Move to the object's offset, padding with zeros if necessary. */
8064 assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset);
8065 offset = SYMBOL_REF_BLOCK_OFFSET (symbol);
8066 if (CONSTANT_POOL_ADDRESS_P (symbol))
8067 {
8068 desc = SYMBOL_REF_CONSTANT (symbol);
8069 /* Pass 1 for align as we have already laid out everything in the block.
8070 So aligning shouldn't be necessary. */
8071 output_constant_pool_1 (desc, 1);
8072 offset += GET_MODE_SIZE (desc->mode);
8073 }
8074 else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
8075 {
8076 HOST_WIDE_INT size;
8077 decl = SYMBOL_REF_DECL (symbol);
8078 assemble_constant_contents (DECL_INITIAL (decl), XSTR (symbol, 0),
8079 DECL_ALIGN (decl), false);
8080
8081 size = get_constant_size (DECL_INITIAL (decl));
8082 offset += size;
8083 if ((flag_sanitize & SANITIZE_ADDRESS)
8084 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
8085 && asan_protect_global (DECL_INITIAL (decl)))
8086 {
8087 size = asan_red_zone_size (size);
8088 assemble_zeros (size);
8089 offset += size;
8090 }
8091 }
8092 else
8093 {
8094 HOST_WIDE_INT size;
8095 decl = SYMBOL_REF_DECL (symbol);
8096 assemble_variable_contents (decl, XSTR (symbol, 0), false, false);
8097 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
8098 offset += size;
8099 if ((flag_sanitize & SANITIZE_ADDRESS)
8100 && asan_protect_global (decl))
8101 {
8102 size = asan_red_zone_size (size);
8103 assemble_zeros (size);
8104 offset += size;
8105 }
8106 }
8107 }
8108 }
8109
8110 /* A callback for qsort to compare object_blocks. */
8111
8112 static int
8113 output_object_block_compare (const void *x, const void *y)
8114 {
8115 object_block *p1 = *(object_block * const*)x;
8116 object_block *p2 = *(object_block * const*)y;
8117
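  /* Unnamed sections sort before named ones, named sections sort by name,
     and unnamed sections fall back to comparing flags, so the output order
     is deterministic.  */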
8118 if (p1->sect->common.flags & SECTION_NAMED
8119 && !(p2->sect->common.flags & SECTION_NAMED))
8120 return 1;
8121
8122 if (!(p1->sect->common.flags & SECTION_NAMED)
8123 && p2->sect->common.flags & SECTION_NAMED)
8124 return -1;
8125
8126 if (p1->sect->common.flags & SECTION_NAMED
8127 && p2->sect->common.flags & SECTION_NAMED)
8128 return strcmp (p1->sect->named.name, p2->sect->named.name);
8129
8130 unsigned f1 = p1->sect->common.flags;
8131 unsigned f2 = p2->sect->common.flags;
8132 if (f1 == f2)
8133 return 0;
8134 return f1 < f2 ? -1 : 1;
8135 }
8136
8137 /* Output the definitions of all object_blocks. */
8138
8139 void
8140 output_object_blocks (void)
8141 {
8142 vec<object_block *, va_heap> v;
8143 v.create (object_block_htab->elements ());
8144 object_block *obj;
8145 hash_table<object_block_hasher>::iterator hi;
8146
8147 FOR_EACH_HASH_TABLE_ELEMENT (*object_block_htab, obj, object_block *, hi)
8148 v.quick_push (obj);
8149
8150 /* Sort them in order to output them in a deterministic manner,
8151 otherwise we may get .rodata sections in different orders with
8152 and without -g. */
8153 v.qsort (output_object_block_compare);
8154 unsigned i;
8155 FOR_EACH_VEC_ELT (v, i, obj)
8156 output_object_block (obj);
8157
8158 v.release ();
8159 }
8160
8161 /* This function provides a possible implementation of the
8162 TARGET_ASM_RECORD_GCC_SWITCHES target hook for ELF targets. When triggered
8163 by -frecord-gcc-switches it creates a new mergeable string section in the
8164 assembler output file called TARGET_ASM_RECORD_GCC_SWITCHES_SECTION which
8165 contains the switches in ASCII format.
8166
8167 FIXME: This code does not correctly handle double quote characters
8168 that appear inside strings (it strips them rather than preserving them).
8169 FIXME: ASM_OUTPUT_ASCII, as defined in config/elfos.h, will not emit NUL
8170 characters - instead it treats them as sub-string separators. Since
8171 we want to emit NUL string terminators into the object file we have to use
8172 ASM_OUTPUT_SKIP. */
8173
8174 void
8175 elf_record_gcc_switches (const char *options)
8176 {
8177 section *sec = get_section (targetm.asm_out.record_gcc_switches_section,
8178 SECTION_DEBUG | SECTION_MERGE
8179 | SECTION_STRINGS | (SECTION_ENTSIZE & 1), NULL);
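  /* SECTION_ENTSIZE & 1 requests a mergeable-string entity size of 1 byte;
     passing strlen + 1 below makes sure the terminating NUL is emitted.  */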
8180 switch_to_section (sec);
8181 ASM_OUTPUT_ASCII (asm_out_file, options, strlen (options) + 1);
8182 }
8183
8184 /* Emit text to declare externally defined symbols. It is needed to
8185 properly support non-default visibility. */
8186 void
8187 default_elf_asm_output_external (FILE *file ATTRIBUTE_UNUSED,
8188 tree decl,
8189 const char *name ATTRIBUTE_UNUSED)
8190 {
8191 /* We output the name if and only if TREE_SYMBOL_REFERENCED is
8192 set in order to avoid putting out names that are never really
8193 used. Always output visibility specified in the source. */
8194 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
8195 && (DECL_VISIBILITY_SPECIFIED (decl)
8196 || targetm.binds_local_p (decl)))
8197 maybe_assemble_visibility (decl);
8198 }
8199
8200 /* The default hook for TARGET_ASM_OUTPUT_SOURCE_FILENAME. */
8201
8202 void
8203 default_asm_output_source_filename (FILE *file, const char *name)
8204 {
8205 #ifdef ASM_OUTPUT_SOURCE_FILENAME
8206 ASM_OUTPUT_SOURCE_FILENAME (file, name);
8207 #else
8208 fprintf (file, "\t.file\t");
8209 output_quoted_string (file, name);
8210 putc ('\n', file);
8211 #endif
8212 }
8213
8214 /* Output a file name in the form wanted by System V. */
8215
8216 void
8217 output_file_directive (FILE *asm_file, const char *input_name)
8218 {
8219 int len;
8220 const char *na;
8221
8222 if (input_name == NULL)
8223 input_name = "<stdin>";
8224 else
8225 input_name = remap_debug_filename (input_name);
8226
8227 len = strlen (input_name);
8228 na = input_name + len;
8229
8230 /* NA gets INPUT_NAME sans directory names. */
8231 while (na > input_name)
8232 {
8233 if (IS_DIR_SEPARATOR (na[-1]))
8234 break;
8235 na--;
8236 }
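  /* E.g. an INPUT_NAME of "src/dir/foo.c" is reduced to "foo.c" here.  */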
8237
8238 targetm.asm_out.output_source_filename (asm_file, na);
8239 }
8240
8241 /* Create a DEBUG_EXPR_DECL / DEBUG_EXPR pair from RTL expression
8242 EXP. */
8243 rtx
8244 make_debug_expr_from_rtl (const_rtx exp)
8245 {
8246 tree ddecl = make_node (DEBUG_EXPR_DECL), type;
8247 machine_mode mode = GET_MODE (exp);
8248 rtx dval;
8249
8250 DECL_ARTIFICIAL (ddecl) = 1;
8251 if (REG_P (exp) && REG_EXPR (exp))
8252 type = TREE_TYPE (REG_EXPR (exp));
8253 else if (MEM_P (exp) && MEM_EXPR (exp))
8254 type = TREE_TYPE (MEM_EXPR (exp));
8255 else
8256 type = NULL_TREE;
8257 if (type && TYPE_MODE (type) == mode)
8258 TREE_TYPE (ddecl) = type;
8259 else
8260 TREE_TYPE (ddecl) = lang_hooks.types.type_for_mode (mode, 1);
8261 SET_DECL_MODE (ddecl, mode);
8262 dval = gen_rtx_DEBUG_EXPR (mode);
8263 DEBUG_EXPR_TREE_DECL (dval) = ddecl;
8264 SET_DECL_RTL (ddecl, dval);
8265 return dval;
8266 }
8267
8268 #ifdef ELF_ASCII_ESCAPES
8269 /* Default ASM_OUTPUT_LIMITED_STRING for ELF targets. */
8270
8271 void
8272 default_elf_asm_output_limited_string (FILE *f, const char *s)
8273 {
8274 int escape;
8275 unsigned char c;
8276
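  /* Each source byte is emitted verbatim, as a single-character escape such
     as "\n", or as a three-digit octal escape such as "\201", according to
     the ELF_ASCII_ESCAPES table.  */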
8277 fputs (STRING_ASM_OP, f);
8278 putc ('"', f);
8279 while (*s != '\0')
8280 {
8281 c = *s;
8282 escape = ELF_ASCII_ESCAPES[c];
8283 switch (escape)
8284 {
8285 case 0:
8286 putc (c, f);
8287 break;
8288 case 1:
8289 putc ('\\', f);
8290 putc ('0'+((c>>6)&7), f);
8291 putc ('0'+((c>>3)&7), f);
8292 putc ('0'+(c&7), f);
8293 break;
8294 default:
8295 putc ('\\', f);
8296 putc (escape, f);
8297 break;
8298 }
8299 s++;
8300 }
8301 putc ('\"', f);
8302 putc ('\n', f);
8303 }
8304
8305 /* Default ASM_OUTPUT_ASCII for ELF targets. */
8306
8307 void
8308 default_elf_asm_output_ascii (FILE *f, const char *s, unsigned int len)
8309 {
8310 const char *limit = s + len;
8311 const char *last_null = NULL;
8312 unsigned bytes_in_chunk = 0;
8313 unsigned char c;
8314 int escape;
8315
8316 for (; s < limit; s++)
8317 {
8318 const char *p;
8319
8320 if (bytes_in_chunk >= 60)
8321 {
8322 putc ('\"', f);
8323 putc ('\n', f);
8324 bytes_in_chunk = 0;
8325 }
8326
8327 if (s > last_null)
8328 {
8329 for (p = s; p < limit && *p != '\0'; p++)
8330 continue;
8331 last_null = p;
8332 }
8333 else
8334 p = last_null;
8335
8336 if (p < limit && (p - s) <= (long) ELF_STRING_LIMIT)
8337 {
8338 if (bytes_in_chunk > 0)
8339 {
8340 putc ('\"', f);
8341 putc ('\n', f);
8342 bytes_in_chunk = 0;
8343 }
8344
8345 default_elf_asm_output_limited_string (f, s);
8346 s = p;
8347 }
8348 else
8349 {
8350 if (bytes_in_chunk == 0)
8351 fputs (ASCII_DATA_ASM_OP "\"", f);
8352
8353 c = *s;
8354 escape = ELF_ASCII_ESCAPES[c];
8355 switch (escape)
8356 {
8357 case 0:
8358 putc (c, f);
8359 bytes_in_chunk++;
8360 break;
8361 case 1:
8362 putc ('\\', f);
8363 putc ('0'+((c>>6)&7), f);
8364 putc ('0'+((c>>3)&7), f);
8365 putc ('0'+(c&7), f);
8366 bytes_in_chunk += 4;
8367 break;
8368 default:
8369 putc ('\\', f);
8370 putc (escape, f);
8371 bytes_in_chunk += 2;
8372 break;
8373 }
8374
8375 }
8376 }
8377
8378 if (bytes_in_chunk > 0)
8379 {
8380 putc ('\"', f);
8381 putc ('\n', f);
8382 }
8383 }
8384 #endif
8385
8386 static GTY(()) section *elf_init_array_section;
8387 static GTY(()) section *elf_fini_array_section;
8388
8389 static section *
8390 get_elf_initfini_array_priority_section (int priority,
8391 bool constructor_p)
8392 {
8393 section *sec;
8394 if (priority != DEFAULT_INIT_PRIORITY)
8395 {
8396 char buf[18];
8397 sprintf (buf, "%s.%.5u",
8398 constructor_p ? ".init_array" : ".fini_array",
8399 priority);
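      /* E.g. a constructor with priority 101 goes into ".init_array.00101",
	 so the linker's lexical sort of section names also orders by
	 priority.  */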
8400 sec = get_section (buf, SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8401 }
8402 else
8403 {
8404 if (constructor_p)
8405 {
8406 if (elf_init_array_section == NULL)
8407 elf_init_array_section
8408 = get_section (".init_array",
8409 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8410 sec = elf_init_array_section;
8411 }
8412 else
8413 {
8414 if (elf_fini_array_section == NULL)
8415 elf_fini_array_section
8416 = get_section (".fini_array",
8417 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8418 sec = elf_fini_array_section;
8419 }
8420 }
8421 return sec;
8422 }
8423
8424 /* Use .init_array section for constructors. */
8425
8426 void
8427 default_elf_init_array_asm_out_constructor (rtx symbol, int priority)
8428 {
8429 section *sec = get_elf_initfini_array_priority_section (priority,
8430 true);
8431 assemble_addr_to_section (symbol, sec);
8432 }
8433
8434 /* Use .fini_array section for destructors. */
8435
8436 void
8437 default_elf_fini_array_asm_out_destructor (rtx symbol, int priority)
8438 {
8439 section *sec = get_elf_initfini_array_priority_section (priority,
8440 false);
8441 assemble_addr_to_section (symbol, sec);
8442 }
8443
8444 /* Default TARGET_ASM_OUTPUT_IDENT hook.
8445
8446 This is a bit of a cheat. The real default is a no-op, but this
8447 hook is the default for all targets with a .ident directive. */
8448
8449 void
8450 default_asm_output_ident_directive (const char *ident_str)
8451 {
8452 const char *ident_asm_op = "\t.ident\t";
8453
8454 /* If we are still in the front end, do not write out the string
8455 to asm_out_file. Instead, add a fake top-level asm statement.
8456 This allows the front ends to use this hook without actually
8457 writing to asm_out_file, to handle #ident or Pragma Ident. */
8458 if (symtab->state == PARSING)
8459 {
8460 char *buf = ACONCAT ((ident_asm_op, "\"", ident_str, "\"\n", NULL));
8461 symtab->finalize_toplevel_asm (build_string (strlen (buf), buf));
8462 }
8463 else
8464 fprintf (asm_out_file, "%s\"%s\"\n", ident_asm_op, ident_str);
8465 }
8466
8467
8468 /* This function ensures that vtable_map variables are not only
8469 in the comdat section, but that each variable has its own unique
8470 comdat name. Without this the variables end up in the same section
8471 with a single comdat name.
8472
8473 FIXME: resolve_unique_section needs to deal better with
8474 decls with both DECL_SECTION_NAME and DECL_ONE_ONLY. Once
8475 that is fixed, this if-else statement can be replaced with
8476 a single call to "switch_to_section (sect)". */
8477
8478 static void
8479 handle_vtv_comdat_section (section *sect, const_tree decl ATTRIBUTE_UNUSED)
8480 {
8481 #if defined (OBJECT_FORMAT_ELF)
8482 targetm.asm_out.named_section (sect->named.name,
8483 sect->named.common.flags
8484 | SECTION_LINKONCE,
8485 DECL_NAME (decl));
8486 in_section = sect;
8487 #else
8488 /* Neither OBJECT_FORMAT_PE nor OBJECT_FORMAT_COFF is set here.
8489 Therefore the following check is used.
8490 In case the target is PE or COFF, a comdat group section
8491 is created, e.g. .vtable_map_vars$foo. The linker places
8492 everything in .vtable_map_vars at the end.
8493
8494 A fix could be made in
8495 gcc/config/i386/winnt.cc: i386_pe_unique_section. */
8496 if (TARGET_PECOFF)
8497 {
8498 char *name;
8499
8500 if (TREE_CODE (DECL_NAME (decl)) == IDENTIFIER_NODE)
8501 name = ACONCAT ((sect->named.name, "$",
8502 IDENTIFIER_POINTER (DECL_NAME (decl)), NULL));
8503 else
8504 name = ACONCAT ((sect->named.name, "$",
8505 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (DECL_NAME (decl))),
8506 NULL));
8507
8508 targetm.asm_out.named_section (name,
8509 sect->named.common.flags
8510 | SECTION_LINKONCE,
8511 DECL_NAME (decl));
8512 in_section = sect;
8513 }
8514 else
8515 switch_to_section (sect);
8516 #endif
8517 }
8518
8519 #include "gt-varasm.h"
8520